package cn.my.springHDFS.web;

import cn.my.springHDFS.domain.Plupload;
import cn.my.springHDFS.domain.SpiderStatus;
import cn.my.springHDFS.domain.User;
import cn.my.springHDFS.service.ManagementServiceImpl;
import cn.my.springHDFS.service.PluploadServiceImpl;
import cn.my.springHDFS.service.UserServiceImpl;
import cn.my.springHDFS.util.HDFS_Util;
import cn.my.springHDFS.util.ToolUtil;
import cn.my.springHDFS.util.WebHDFSProcessor;
import io.swagger.annotations.ApiOperation;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import sun.security.provider.ConfigFile;
import us.codecraft.webmagic.Spider;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.swing.filechooser.FileSystemView;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Created by Leo on 2017/6/15.
 * Administration controller: serves the back-office pages and exposes the
 * HDFS file/folder management endpoints (list, upload, download, create
 * folder, rename, delete). Authorization is role-based via Spring Security
 * ({@code ROLE_SUPER} vs {@code ROLE_USER}).
 */
@Controller
public class ManagementController {

    @Autowired
    private ManagementServiceImpl managementServiceImpl;

    @Autowired
    private UserServiceImpl userService;

    @Autowired
    private HDFS_Util hdfs_util;

    // Injected once and reused by every handler (previously some handlers
    // created a fresh ToolUtil per request — now consistent with getAllFolder).
    @Autowired
    private ToolUtil toolUtil;

    // SLF4J logger — all former System.out.println debugging now goes here.
    private static final Logger logger = LoggerFactory.getLogger(ManagementController.class);

    // NameNode RPC port and the WebHDFS URL fragments used to assemble download links.
    private static final String port = "9000";
    private static final String download_keyword_1 = "/webhdfs/v1";
    private static final String download_keyword_2 = "?op=OPEN&namenoderpcaddress=" + "localhost:" + port + "&offset=0";
    // NOTE(review): unused within this class — confirm no external reflection use before removing.
    private static final String FileDir = "uploadFileDir";

    @ApiOperation(value = "默认跳转到后台登录界面", httpMethod = "GET" , notes = "默认跳转到后台登录界面")
    @RequestMapping(value="/")
    public String goIndex(HttpServletRequest httpServletRequest){
        // Root URL redirects straight to the admin login page.
        return "redirect:./adminLogin";
    }

    @ApiOperation(value = "跳转到后台登录界面", httpMethod = "GET", notes = "跳转到后台登录界面")
    @RequestMapping(value = "/adminLogin")
    public String goAdminLogin(HttpServletRequest httpServletRequest){
        // View name of the admin login page.
        return "/adminLogin";
    }

    @ApiOperation(value = "跳转到后台主页", httpMethod = "GET", notes = "跳转到后台主页")
    @RequestMapping(value = "/management")
    public String goAdminHome(HttpServletRequest httpServletRequest){
        // View name of the admin home page.
        return "/management";
    }

    /**
     * Lists folders/files visible to the caller as a JSON string.
     * Plain users ({@code ROLE_USER}) are confined to their personal root
     * folder; every other role browses from "/".
     *
     * @throws IOException   on HDFS access failure
     * @throws JSONException on JSON assembly failure
     */
    @ApiOperation(value = "获取所有文件夹信息" ,httpMethod = "GET", notes = "获取所有文件夹信息")
    @RequestMapping(value = "get_allFolder")
    @ResponseBody
    public String getAllFolder(Authentication authentication) throws IOException, JSONException {
        // HDFS path the listing starts from.
        String final_path = "/";

        Object[] authObject = authentication.getAuthorities().toArray();
        if (authObject[0].toString().equals("ROLE_USER")){
            User user = userService.findByuserAccount(authentication.getPrincipal().toString());
            final_path = user.getUserFolderRoot();
            logger.debug("getAllFolder: user root folder = {}", final_path);
        }

        // ipSwitch returns [Hadoop Configuration, NameNode address], chosen by
        // the spider's cluster-active flag.
        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);
        String final_address = (String) objectList.get(1);
        JSONObject jsonObject = hdfs_util.getFolderOrFileInPath(final_address + ":" + port, final_path, conf);
        return jsonObject.toString();
    }

    /**
     * Deletes a file. Super users delete as "root"; everyone else only as
     * themselves (ownership is verified by {@code checkAndDel}).
     *
     * @param path HDFS path of the file to delete
     * @return true if the file was deleted
     */
    @ApiOperation(value = "删除文件" ,httpMethod = "POST", notes = "删除文件")
    @RequestMapping(value = "delete_file")
    @ResponseBody
    public boolean deleteFile(@RequestParam String path, Authentication authentication) {
        String username = authentication.getPrincipal().toString();
        Object[] authorities = authentication.getAuthorities().toArray();
        boolean isSuper = authorities[0].toString().replace("ROLE_", "").equals("SUPER");

        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);

        String effectiveUser = isSuper ? "root" : username;
        boolean deleted = hdfs_util.checkAndDel(path, conf, effectiveUser);
        logger.debug("deleteFile: path={} effectiveUser={} deleted={}", path, effectiveUser, deleted);
        return deleted;
    }

    /**
     * Receives a Plupload chunked upload, stages it in a per-user local
     * directory, and hands it to the upload service.
     *
     * @return upload result from {@code PluploadServiceImpl.upload}
     */
    @ApiOperation(value = "上传文件" ,httpMethod = "POST", notes = "上传文件")
    @RequestMapping(value = "/upload_file")
    @ResponseBody
    public Boolean uploadFile(Plupload plupload, HttpServletRequest request, HttpServletResponse response, Authentication authentication){
        plupload.setRequest(request);

        // Normalise the target HDFS folder taken from the "folder_path" header.
        // NOTE(review): a missing header NPEs here, and the inner else-branch
        // indexes [1] on an array of length <= 1 (out of bounds) — both look
        // like latent bugs; behaviour kept, confirm against the front-end contract.
        String folderPath = request.getHeader("folder_path");
        if (!folderPath.equals("/")) {
            String[] pathArray = folderPath.replace("hdfs://", "").split("/");
            if (pathArray.length > 1){
                plupload.setUserUploadPath(StringUtils.join(pathArray, "/"));
            }
            else {
                plupload.setUserUploadPath("/" + folderPath.replace("hdfs://", "").split("/")[1]);
            }
        }
        else {
            plupload.setUserUploadPath(folderPath);
        }
        plupload.setUserAccount(authentication.getPrincipal().toString());

        // Local staging directory, partitioned by user account:
        //   Windows: <desktop>\WebHDFS_UploadHistory\<user>
        //   others : /tmp/web_HDFS/file_cache/<user>
        String osName = System.getProperties().getProperty("os.name");
        File dir;
        // startsWith("Windows") — the old equals("Windows 10") sent every other
        // Windows version down the /tmp branch.
        if (osName != null && osName.startsWith("Windows")) {
            String desktopPath = FileSystemView.getFileSystemView().getHomeDirectory().getAbsolutePath();
            dir = new File(desktopPath + "\\WebHDFS_UploadHistory\\" + authentication.getPrincipal().toString());
        } else {
            dir = new File("/tmp/web_HDFS/file_cache/" + authentication.getPrincipal().toString());
        }
        // mkdirs() creates the whole hierarchy; surface a failure instead of ignoring it.
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("uploadFile: could not create staging directory {}", dir.getAbsolutePath());
        }

        // Hand the actual transfer over to the upload service.
        return PluploadServiceImpl.upload(plupload, dir, authentication);
    }

    /**
     * Builds a WebHDFS OPEN URL for downloading a file. Prefers a DataNode
     * that actually holds a block of the file; falls back to the NameNode
     * web port (50070) when no block locations are known.
     *
     * @param path HDFS path of the file
     * @return the download URL
     * @throws IOException if local host resolution fails
     */
    @ApiOperation(value = "下载文件" ,httpMethod = "POST", notes = "下载文件")
    @RequestMapping(value = "download_file")
    @ResponseBody
    public String downloadFile(@RequestParam String path, HttpServletRequest httpServletRequest) throws IOException {
        // USERDOMAIN only exists on Windows; fall back to the local host name
        // elsewhere (the original NPE'd on Linux here).
        String domainEnv = System.getenv("USERDOMAIN");
        String computerDomain = (domainEnv != null ? domainEnv : InetAddress.getLocalHost().getHostName())
                .toLowerCase(Locale.ROOT);

        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);

        // DataNodes holding blocks of the file, e.g.
        //   hadoopslave1 -> 192.168.88.110
        //   hadoopslave2 -> 192.168.88.111
        //   hadoopslave3 -> 192.168.88.112
        Map<String, String> fileInCluster = hdfs_util.WhereIsFileInCluster(path, conf);

        // Local variable — the old instance field was shared across concurrent
        // requests on this singleton controller (race condition).
        String downloadUrl;
        if (!fileInCluster.isEmpty()) {
            List<String> replicaAddresses = new ArrayList<>(fileInCluster.values());

            // nextInt(bound) already yields [0, bound); the old "randomNumber - 1"
            // skewed selection toward low indices and made the last replica unreachable.
            int randomIndex = ThreadLocalRandom.current().nextInt(replicaAddresses.size());

            // e.g. http://leo-desktop:50075/webhdfs/v1/<file>?op=OPEN&namenoderpcaddress=localhost:9000&offset=0
            downloadUrl = "http://" + replicaAddresses.get(randomIndex) + download_keyword_1 + path + download_keyword_2;
            downloadUrl = downloadUrl.replace("localhost", InetAddress.getLocalHost().getHostAddress());
            logger.debug("downloadFile via DataNode: {}", downloadUrl);
        }
        else {
            // No block locations: go through the NameNode web port instead.
            downloadUrl = "http://" + computerDomain + ":" + "50070" + download_keyword_1 + path + download_keyword_2;
            downloadUrl = downloadUrl.replace("ns1", "localhost:" + port);
            downloadUrl = downloadUrl.replace("localhost", InetAddress.getLocalHost().getHostAddress());
            logger.debug("downloadFile via NameNode: {}", downloadUrl);
        }
        return downloadUrl;
    }

    /**
     * Creates a folder. Super users may create anywhere (path is made
     * absolute); plain users are confined to their personal root folder.
     *
     * @param folder_name folder name or path to create
     * @return true if the directory was created
     */
    @ApiOperation(value = "创建文件夹", httpMethod = "POST", notes = "创建文件夹")
    @RequestMapping(value = "create_folder")
    @ResponseBody
    public boolean createFolder(@RequestParam String folder_name, Authentication authentication){
        String username = authentication.getPrincipal().toString();
        Object[] authorities = authentication.getAuthorities().toArray();

        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);

        if (authorities[0].toString().replace("ROLE_", "").equals("SUPER")){
            logger.debug("createFolder (SUPER): {}", folder_name);
            // Ensure the path is absolute for super users.
            String absolutePath = folder_name.startsWith("/") ? folder_name : "/" + folder_name;
            return hdfs_util.CreatDir(absolutePath, conf, username);
        }
        logger.debug("createFolder: user={} folder={}", username, folder_name);
        return hdfs_util.CreatDir("/" + username + "/" + folder_name, conf, username);
    }

    /**
     * Deletes a folder after {@code checkAndDel} verifies ownership.
     *
     * @param folder_path HDFS path of the folder to delete
     * @return true if the folder was deleted
     */
    @ApiOperation(value = "删除文件夹", httpMethod = "POST", notes = "删除文件夹")
    @RequestMapping(value = "delete_folder")
    @ResponseBody
    public boolean deleteFolder(@RequestParam String folder_path, Authentication authentication) {
        String username = authentication.getPrincipal().toString();
        logger.debug("deleteFolder: user={} path={}", username, folder_path);

        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);
        return hdfs_util.checkAndDel(folder_path, conf, username);
    }

    /**
     * Renames (moves) a file or folder.
     *
     * @param oldPath current HDFS path
     * @param newPath desired HDFS path
     * @return true if the rename succeeded
     */
    @ApiOperation(value = "重命名文件", httpMethod = "POST", notes = "重命名文件")
    @RequestMapping(value = "rename_folder")
    @ResponseBody
    public boolean renameFile(@RequestParam String oldPath, @RequestParam String newPath, Authentication authentication){
        List<Object> objectList = toolUtil.ipSwitch(SpiderStatus.getIsActive());
        Configuration conf = (Configuration) objectList.get(0);
        logger.debug("renameFile: {} -> {}", oldPath, newPath);

        // Collapse the doubled leading slash produced by naive client-side path joins.
        if (newPath.startsWith("//")) {
            newPath = newPath.replace("//", "/");
        }
        return hdfs_util.renameFile(oldPath, newPath, conf);
    }
}

