package com.weirq.mvc.cloud;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;

import com.weirq.dao.impl.BaseDaoImpl;
import com.weirq.mvc.BaseController;
import com.weirq.util.BaseUtils;
import com.weirq.util.DateUtil;
import com.weirq.util.Ecode;
import com.weirq.util.Json;
import com.weirq.util.SiteUrl;
import com.weirq.vo.FileSystemVo;
import com.weirq.vo.Menu;
import com.weirq.vo.ShareFile;

/**
 * Controller implementing cloud file operations backed by HDFS:
 * listing, search, upload, download, delete, move/copy, rename,
 * directory tree, file sharing and online text preview.
 *
 * @author pc-Haitao
 * @date 2016年8月18日下午4:53:29
 */

@Controller
@RequestMapping("/cloud")
public class CloudController extends BaseController {

	/**
	 * Lists the files under the given HDFS directory. When {@code name} is
	 * empty, defaults to the logged-in user's home directory
	 * ({@code "/<username>"}).
	 *
	 * @param name    HDFS directory to list (optional)
	 * @param session current HTTP session (provides the "username" attribute)
	 * @param model   view model receiving the file list, current dir and URL trail
	 * @return view name of the listing page
	 * @throws Exception on HDFS access failure
	 */
	@RequestMapping("/list")
	public String list(String name, HttpSession session, Model model) throws Exception {
		if (!BaseUtils.isNotEmpty(name)) {
			name = (String) session.getAttribute("username");
			name = "/" + name;
		}
		model.addAttribute("fs", hdfsDB.queryAll(name));
		model.addAttribute("dir", name);
		model.addAttribute("url", BaseUtils.getUrl(name));
		return "/cloud/list";
	}

	/**
	 * Searches for files by name under the logged-in user's directory.
	 * Results are stored in the session (the search view reads them from
	 * there) after any previous search attributes are cleared.
	 *
	 * @param name       ignored on input; overwritten with the session username
	 * @param searchName file-name pattern to search for
	 * @param session    current HTTP session
	 * @return view name of the search results page
	 * @throws Exception on HDFS access failure
	 */
	@RequestMapping("/searchlist")
	public String searchlist(String name, String searchName, HttpSession session) throws Exception {
		// Drop any stale results from a previous search before running a new one.
		if (session.getAttribute("searchfs") != null) {
			session.removeAttribute("searchfs");
			session.removeAttribute("dirsearch");
			session.removeAttribute("searchName");
		}
		name = (String) session.getAttribute("username");
		if (name != null) {
			name = "/" + name;
		}
		session.setAttribute("searchName", searchName);
		session.setAttribute("searchfs", hdfsDB.queryByName(name, searchName));
		session.setAttribute("dirsearch", name);
		return "/cloud/search";
	}

	/**
	 * Lists all text documents under the given directory (defaults to the
	 * logged-in user's home directory).
	 *
	 * @param name    HDFS directory (optional)
	 * @param session current HTTP session
	 * @param model   view model receiving the document list
	 * @return view name of the text-file page
	 * @throws Exception on HDFS access failure
	 */
	@RequestMapping("/listtxt")
	public String listTxt(String name, HttpSession session, Model model) throws Exception {
		if (!BaseUtils.isNotEmpty(name)) {
			name = (String) session.getAttribute("username");
			name = "/" + name;
		}
		model.addAttribute("fstxt", hdfsDB.queryAlltxt(name, name));
		return "/cloud/txt";
	}

	/**
	 * Lists all images under the given directory (defaults to the logged-in
	 * user's home directory).
	 *
	 * @param name    HDFS directory (optional)
	 * @param session current HTTP session
	 * @param model   view model receiving the image list
	 * @return view name of the image page
	 * @throws Exception on HDFS access failure
	 */
	@RequestMapping("/listimg")
	public String listImg(String name, HttpSession session, Model model) throws Exception {
		if (!BaseUtils.isNotEmpty(name)) {
			name = (String) session.getAttribute("username");
			name = "/" + name;
		}
		model.addAttribute("fsimg", hdfsDB.queryAllimg(name, name));
		return "/cloud/img";
	}

	/**
	 * Creates a directory under {@code dirName} (or under the user's home
	 * directory when {@code dirName} is empty).
	 *
	 * @param mkdir   name of the directory to create
	 * @param dirName parent directory (optional)
	 * @param session current HTTP session
	 * @return JSON result carrying the created entry on success
	 */
	@ResponseBody
	@RequestMapping("/mkdir")
	public Json mkdir(String mkdir, String dirName, HttpSession session) {
		Json json = new Json();
		if (!BaseUtils.isNotEmpty(mkdir)) {
			json.setMsg("空值无效");
			return json;
		}
		String name = (String) session.getAttribute("username");
		if (name == null) {
			json.setMsg("用户已注销，请重新登陆");
			return json;
		}
		try {
			String dir = BaseUtils.isNotEmpty(dirName) ? dirName : "/" + name;
			// Create the directory inside the resolved parent.
			hdfsDB.mkdir(dir + "/" + mkdir);
			FileSystemVo fs = new FileSystemVo();
			fs.setName(mkdir);
			fs.setType("D");
			fs.setDate(DateUtil.DateToString("yyyy-MM-dd HH:mm", new Date()));
			json.setObj(fs);
			json.setMsg("创建成功");
			json.setSuccess(true);
		} catch (Exception e) {
			e.printStackTrace();
			json.setMsg("创建失败");
		}
		return json;
	}

	/**
	 * Uploads one or more multipart files into the given HDFS directory.
	 *
	 * @param dir     target HDFS directory
	 * @param session current HTTP session
	 * @param request incoming (possibly multipart) request
	 * @return JSON result; success is set once at least one file is stored
	 * @throws Exception on HDFS or multipart failure
	 */
	@ResponseBody
	@RequestMapping("/upload")
	public Json upload(String dir, HttpSession session, HttpServletRequest request) throws Exception {
		Json json = new Json();
		String name = (String) session.getAttribute("username");
		if (name == null) {
			json.setMsg("用户已注销，请重新登陆");
			return json;
		}
		CommonsMultipartResolver multipartResolver = new CommonsMultipartResolver(request.getServletContext());
		if (multipartResolver.isMultipart(request)) {
			MultipartHttpServletRequest multipartRequest = (MultipartHttpServletRequest) request;
			Map<String, MultipartFile> fms = multipartRequest.getFileMap();
			for (Map.Entry<String, MultipartFile> entry : fms.entrySet()) {
				MultipartFile mf = entry.getValue();
				// try-with-resources: the stream is closed even if the upload throws
				// (the original leaked it on failure).
				try (InputStream in = mf.getInputStream()) {
					hdfsDB.upload(in, dir + "/" + mf.getOriginalFilename());
				}
				json.setSuccess(true);
			}
		}
		return json;
	}

	/**
	 * Deletes files and/or directories.
	 *
	 * @param ids comma-separated entry names to delete
	 * @param dir directory containing the entries
	 * @return JSON result
	 * @throws Exception never thrown directly; failures are reported in the JSON
	 */
	@ResponseBody
	@RequestMapping("/delete")
	public Json delete(String ids, String dir) throws Exception {
		Json json = new Json();
		try {
			for (String id : ids.split(",")) {
				hdfsDB.delete(dir + "/" + id);
			}
			json.setSuccess(true);
			json.setMsg("删除成功");
		} catch (Exception e) {
			json.setMsg("删除失败");
			e.printStackTrace();
		}
		return json;
	}

	/**
	 * Moves or copies files/directories to another directory.
	 *
	 * @param ids  comma-separated entry names
	 * @param dir  source directory
	 * @param dst  destination directory
	 * @param flag presumably selects copy vs. move — TODO confirm against hdfsDB.copy
	 * @return JSON result
	 * @throws Exception never thrown directly; failures are reported in the JSON
	 */
	@ResponseBody
	@RequestMapping("/copy")
	public Json copy(String ids, String dir, String dst, boolean flag) throws Exception {
		Json json = new Json();
		String[] ns = ids.split(",");
		for (int i = 0; i < ns.length; i++) {
			ns[i] = dir + "/" + ns[i];
		}
		try {
			hdfsDB.copy(ns, dst, flag);
			json.setSuccess(true);
			// Fixed: original reported "删除成功" (delete) for a move/copy operation.
			json.setMsg("操作成功");
		} catch (Exception e) {
			json.setMsg("操作失败");
			e.printStackTrace();
		}
		return json;
	}

	/**
	 * Renames a file or directory. For files ({@code type "F"}) the original
	 * extension is preserved; for directories ({@code type "D"}) the new name
	 * is used as-is.
	 *
	 * @param dir    directory containing the entry
	 * @param name   current entry name
	 * @param rename new name (without extension for files)
	 * @param type   "F" for file, "D" for directory
	 * @return JSON result
	 * @throws Exception never thrown directly; failures are reported in the JSON
	 */
	@ResponseBody
	@RequestMapping("/rename")
	public Json rename(String dir, String name, String rename, String type) throws Exception {
		Json json = new Json();
		try {
			if (type.equals("F")) {
				// Fixed: substring(lastIndexOf(".")) threw for files without an
				// extension; now the extension suffix is empty in that case.
				int dot = name.lastIndexOf('.');
				String ext = dot >= 0 ? name.substring(dot) : "";
				hdfsDB.rename(dir + "/" + name, dir + "/" + rename + ext);
			} else if (type.equals("D")) {
				hdfsDB.rename(dir + "/" + name, dir + "/" + rename);
			}
			json.setSuccess(true);
			json.setMsg("重命名成功");
		} catch (Exception e) {
			// Fixed: original said "删除失败" (delete failed) here.
			json.setMsg("重命名失败");
			e.printStackTrace();
		}
		return json;
	}

	/**
	 * Downloads a file to the client.
	 *
	 * @param dir      directory containing the file
	 * @param name     file name
	 * @param response servlet response the file content is streamed to
	 * @throws Exception on HDFS or I/O failure
	 */
	@RequestMapping("/download")
	public void download(String dir, String name, HttpServletResponse response) throws Exception {
		hdfsDB.downLoad(dir + "/" + name, response, name);
	}

	/**
	 * Builds the directory tree rooted at {@code id}, or at the logged-in
	 * user's home directory when {@code id} is empty.
	 * (Original javadoc incorrectly described this as a copy operation.)
	 *
	 * @param id      root directory (optional)
	 * @param session current HTTP session
	 * @return tree menu entries, or {@code null} when no user is logged in
	 * @throws Exception on HDFS access failure
	 */
	@ResponseBody
	@RequestMapping("/tree")
	public List<Menu> tree(String id, HttpSession session) throws Exception {
		if (BaseUtils.isNotEmpty(id)) {
			return hdfsDB.tree(id);
		}
		String name = (String) session.getAttribute("username");
		if (name == null) {
			return null;
		}
		return hdfsDB.tree("/" + name);
	}

	/**
	 * Shares files: for each file, any previous share record is removed and a
	 * fresh one (new UUID id, new extraction code) is persisted.
	 *
	 * @param dir     comma-separated file paths to share
	 * @param link    share link
	 * @param session current HTTP session (provides "username")
	 * @return view name of the share page
	 * @throws Exception on persistence failure
	 */
	@RequestMapping("/share")
	public String share(String dir, String link, HttpSession session) throws Exception {
		ShareFile shareFile = new ShareFile();
		for (String fileName : dir.split(",")) {
			// Remove any existing share for the same file before re-sharing.
			new BaseDaoImpl().deleteshare(fileName);
			shareFile.setUsername((String) session.getAttribute("username"));
			shareFile.setLink(link);
			shareFile.setId(UUID.randomUUID().toString());
			shareFile.setEcode(new Ecode().code());
			shareFile.setFileName(fileName);
			shareFile.setTime(new Date());
			// Persist the share record.
			new BaseDaoImpl().add(shareFile);
		}
		return "/cloud/share";
	}

	/**
	 * Shows all files shared by the logged-in user.
	 *
	 * @param name    ignored on input; overwritten with the session username
	 * @param session current HTTP session
	 * @param model   view model receiving the share list
	 * @return view name of the share page
	 * @throws Exception on persistence failure
	 */
	@RequestMapping("/sharelist")
	public String sharelist(String name, HttpSession session, Model model) throws Exception {
		name = (String) session.getAttribute("username");
		List<Map<String, Object>> list = new BaseDaoImpl().findbysharename(name);
		model.addAttribute("share", list);
		return "/cloud/share";
	}

	/**
	 * Cancels shares.
	 *
	 * @param ids comma-separated share identifiers
	 * @return JSON result
	 * @throws Exception never thrown directly; failures are reported in the JSON
	 */
	@ResponseBody
	@RequestMapping("/deleteshare")
	public Json deleteshare(String ids) throws Exception {
		Json json = new Json();
		try {
			for (String id : ids.split(",")) {
				new BaseDaoImpl().deleteshare(id);
			}
			json.setSuccess(true);
			json.setMsg("取消分享成功!");
		} catch (Exception e) {
			json.setMsg("取消分享失败!");
			e.printStackTrace();
		}
		return json;
	}

	/**
	 * Resolves a shared file's link by share id and extraction code.
	 *
	 * @param id    share identifier
	 * @param ecode extraction code entered by the visitor
	 * @return the share link, or "error" when no match is found
	 */
	@ResponseBody
	@RequestMapping("/extract")
	public String extract(String id, String ecode) {
		// Fixed: original ran the same DB query twice.
		List<Map<String, Object>> rows = new BaseDaoImpl().findByEcode(id, ecode);
		if (!rows.isEmpty()) {
			return rows.get(0).get("link").toString();
		}
		return "error";
	}

	/**
	 * Streams a text file's content to the response for in-browser viewing
	 * (txt files only; content is decoded as GB2312).
	 *
	 * @param dir      directory containing the file
	 * @param name     file name
	 * @param response servlet response the lines are written to
	 * @throws Exception on HDFS or I/O failure
	 */
	@ResponseBody
	@RequestMapping("/view")
	public void view(String dir, String name, HttpServletResponse response) throws Exception {
		// Full HDFS URI of the file.
		String uri = SiteUrl.readUrl("hdfs") + dir + "/" + name;
		FileSystem fs = FileSystem.get(URI.create(uri), new Configuration());
		// Fixed: original never closed the reader / underlying HDFS stream.
		try (BufferedReader bufferedReader = new BufferedReader(
				new InputStreamReader(fs.open(new Path(uri)), "GB2312"))) {
			String lineTxt;
			while ((lineTxt = bufferedReader.readLine()) != null) {
				response.getWriter().println(lineTxt);
			}
		}
	}
}
