package cn.edu.scau.cmi.crawler.controller;

import java.lang.reflect.InvocationTargetException;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;

import com.alibaba.fastjson.JSONObject;

import cn.edu.scau.cmi.crawler.service.CrawlerdetectImportService;
import cn.edu.scau.cmi.crawler.service.CrawlereventImportService;
import cn.edu.scau.cmi.ema.domain.Crawlerdetect;
import cn.edu.scau.cmi.ema.domain.Crawlerevent;
import cn.edu.scau.cmi.ema.domain.Crawlerfile;
import cn.edu.scau.cmi.ema.domain.base.CmiPagination;
import cn.edu.scau.cmi.ema.util.CmiSetUtil;
import cn.edu.scau.cmi.ema.util.CmiTimeUtil;
import cn.edu.scau.cmi.front.controller.FrontCmiEmaController;

/*
	爬取文件包含不合格检测、合格检测、说明，需要合理的导入。
*/

@Controller
public class CrawlereventImportController extends FrontCmiEmaController{
	private static Set<Crawlerevent> importingCrawlerdetectSet = new HashSet<Crawlerevent>();
	@Autowired CrawlereventImportService crawlereventImportService;

//	（3）Import a single failed-detection file into the CrawlerEvent table (first tested with Shanghai data).
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/importCrawlerevent/{crawlerfileId}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView importCrawlerevent(@PathVariable Integer crawlerfileId, @ModelAttribute CmiPagination pagination) throws Exception {
		Crawlerfile crawlerfile = crawlerfileDAO.findCrawlerfileById(crawlerfileId);

		// Mark the file as "importing" (status id 3) before the work starts.
		crawlerfile.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(3));
		crawlerfile = crawlerfileService.saveCrawlerfile(crawlerfile);

		System.out.println("\n\n准备导入的文件是：" + crawlerfile.getName());
		System.out.println("准备导入的文件的Id是：" + crawlerfile.getId());

		// Record the outcome: status id 4 = import succeeded, id 5 = import failed.
		int resultStatusId = crawlereventImportService.importCrawlerevent(crawlerfile) ? 4 : 5;
		crawlerfile.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(resultStatusId));
		crawlerfileService.saveCrawlerfile(crawlerfile);

		ModelAndView mav = new ModelAndView();
		mav.addObject("pagination", pagination);
		mav.setViewName("redirect:/listCrawlerfilesDashboard");
		return mav;
	}
	
	
//	（4）Import a batch of failed-detection files into the CrawlerEvent table.
	@ResponseBody
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/batchImportCrawlerevents/{fileQuantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public Object batchImportCrawlerevents(@PathVariable Integer fileQuantity) throws Exception {
		// File type 2 = failed-detection files. Select up to fileQuantity files
		// that have a standard assigned and whose current status permits
		// (re)importing: no status yet, or status id 1, 5, 7 or 8.
		Set<Crawlerfile> selected = new HashSet<>();
		for (Crawlerfile candidate : crawlerfileDAO.findCrawlerfilesByFiletype(2, -1, -1)) {
			if (selected.size() >= fileQuantity) {
				break;
			}
			if (candidate.getCrawlerfilestandard() == null) {
				continue;
			}
			if (candidate.getCrawlerfilestatus() == null) {
				selected.add(candidate);
			} else {
				int statusId = candidate.getCrawlerfilestatus().getId();
				if (statusId == 1 || statusId == 5 || statusId == 7 || statusId == 8) {
					selected.add(candidate);
				}
			}
		}

		// First pass: lock every selected file (status id 2 = locked by this process).
		for (Crawlerfile locked : selected) {
			locked.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(2));
			locked = crawlerfileService.saveCrawlerfile(locked);
		}

		// Second pass: import each file, moving it through status id 3
		// (importing) and then id 4 (success) or id 5 (failure).
		for (Crawlerfile file : selected) {
			file.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(3));
			file = crawlerfileService.saveCrawlerfile(file);

			System.out.println("\n\n准备导入的文件是：" + file.getName());
			System.out.println("准备导入的文件的Id是：" + file.getId());
			int resultStatusId = crawlereventImportService.importCrawlerevent(file) ? 4 : 5;
			file.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(resultStatusId));
			file = crawlerfileService.saveCrawlerfile(file);
		}
		JSONObject jsonObject = new JSONObject();
		jsonObject.put("success", "success");
		return JSONObject.parse(jsonObject.toJSONString());
	}
	
	
	
	
//	Import one checked crawlerevent into the event-related data tables.
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/importEvent/{crawlereventId}",  method =  { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView importEvent(HttpServletRequest request, @PathVariable Integer crawlereventId,  @ModelAttribute CmiPagination pagination) throws Exception {
		ModelAndView mav = new ModelAndView();
		// FIX: the original called crawlerdetectPaginationService.updatePagination
		// here even though this page paginates crawlerevents; made consistent with
		// listCrawlerdetectsDashboard, which uses crawlereventPaginationService
		// for the same entity/view.
		pagination = crawlereventPaginationService.updatePagination(request, pagination);
		Set<Crawlerevent> crawlerevents = crawlereventPaginationService.getPaginationCrawlerevents(pagination);

		Crawlerevent crawlerevent = crawlereventDAO.findCrawlereventById(crawlereventId);
		boolean success = crawlereventImportService.importEvent(crawlerevent);

		if (success) {
			crawlerevent.setDescription("已经导入到了Detect和其他相关表格中");
			crawlerevent.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4)); // status id 4 = imported successfully
			crawlereventService.saveCrawlerevent(crawlerevent);
		}
		// NOTE(review): a failed import is silently ignored here (no status or
		// description change is persisted) — confirm that is intentional.

		System.out.println(crawlerevent.getName() + "已经导入完成!!!");
		mav.addObject("crawlerevents", crawlerevents);
		mav.addObject("entityName", "crawlerevent");
		mav.addObject("pagination", pagination);
		mav.setViewName("adapter/listCrawlereventsDashboard.jsp");
		return mav;
	}

	
	@ResponseBody
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/batchImportEvents/{detectQuantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public Object batchImportEvents(@PathVariable Integer detectQuantity) throws Exception {
		for(int i =0; i < detectQuantity; i++) {
			Crawlerevent crawlerevent = CmiSetUtil.getLastInstance(crawlereventDAO.findCrawlereventsByCrawlerdetectstatus(1, -1, -1));
			if(crawlerevent == null) {
				continue;
			}
			
			if(importingCrawlerdetectSet.contains(crawlerevent)) {
				continue;
			}
			importingCrawlerdetectSet.add(crawlerevent);
			
			System.out.println("");
			System.out.println("*********************************************************************");
			System.out.println("准备导入Event的CrawlerEvent的id是\n" + crawlerevent.getId());
			System.out.println("准备导入Event的CrawlerEvent的名称是\n： " + crawlerevent.getName());		
			
			boolean success = crawlereventImportService.importEvent(crawlerevent);
			
			if(success) {
				crawlerevent.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));//.setImportstatus(filestatustypeDAO.findFilestatustypeById(4));//("导入Detect以及相关表成功");
			}else {
				crawlerevent.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));//.setImportstatus(filestatustypeDAO.findFilestatustypeById(4));//("导入Detect以及相关表成功");
			}
			
			if(!success) {
				crawlerevent.setDescription("导入这一行数据有错误，请查看。" + crawlerevent.getDescription());
				crawlereventService.saveCrawlerevent(crawlerevent);
			}
			importingCrawlerdetectSet.remove(crawlerevent);
		}
		JSONObject jsonObject = new JSONObject();
		jsonObject.put("success", "success");
		return JSONObject.parse(jsonObject.toJSONString());
	}
	
	
//	Render the dashboard page listing crawlerevents, paginated.
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/listCrawlereventsDashboard", method =  { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView listCrawlerdetectsDashboard(HttpServletRequest request, @ModelAttribute CmiPagination pagination) throws Exception {
		// Refresh pagination state from the request, then fetch the current page.
		pagination = crawlereventPaginationService.updatePagination(request, pagination);
		Set<Crawlerevent> pageOfEvents = crawlereventPaginationService.getPaginationCrawlerevents(pagination);

		ModelAndView mav = new ModelAndView("adapter/listCrawlereventsDashboard.jsp");
		mav.addObject("crawlerevents", pageOfEvents);
		mav.addObject("entityName", "crawlerevent");
		mav.addObject("pagination", pagination);
		return mav;
	}
	
	
//	Normalize the date-like fields of a single crawlerevent: producedate,
//	disagreedate, detectannouncedate, detectdate, purchasedate, foodbatch.
	@RequestMapping(value = "/updateCrawlereventDate/{crawlereventId}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView updateCrawlereventDate(@PathVariable Integer crawlereventId) throws ParseException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException  {
		ModelAndView mav = new ModelAndView();

		Crawlerevent crawlerevent = crawlereventDAO.findCrawlereventById(crawlereventId);

		// Only values longer than 6 characters are reformatted
		// (e.g. a bare "202011" is left untouched).
		String rawProducedate = crawlerevent.getProducedate();
		if(rawProducedate != null && rawProducedate.length() > 6) {
			crawlerevent.setProducedate(CmiTimeUtil.format(rawProducedate));
		}

		String rawDisagreedate = crawlerevent.getDisagreedate();
		if(rawDisagreedate != null && rawDisagreedate.length() > 6) {
			// FIX: the original wrote the formatted disagree date into
			// producedate (copy-paste bug); the batch variant uses the
			// correct setter.
			crawlerevent.setDisagreedate(CmiTimeUtil.format(rawDisagreedate));
		}

		String rawDetectannouncedate = crawlerevent.getDetectannouncedate();
		if(rawDetectannouncedate != null && rawDetectannouncedate.length() > 6) {
			crawlerevent.setDetectannouncedate(CmiTimeUtil.format(rawDetectannouncedate));
		}

		String rawDetectdate = crawlerevent.getDetectdate();
		if(rawDetectdate != null && rawDetectdate.length() > 6) {
			crawlerevent.setDetectdate(CmiTimeUtil.format(rawDetectdate));
		}

		String rawPurchasedate = crawlerevent.getPurchasedate();
		if(rawPurchasedate != null && rawPurchasedate.length() > 6) {
			// FIX: the original wrote the formatted purchase date into
			// producedate (copy-paste bug); the batch variant uses the
			// correct setter.
			crawlerevent.setPurchasedate(CmiTimeUtil.format(rawPurchasedate));
		}

		String rawFoodbatch = crawlerevent.getFoodbatch();
		if(rawFoodbatch != null && rawFoodbatch.length() > 6) {
			crawlerevent.setFoodbatch(CmiTimeUtil.format(rawFoodbatch));
		}

		crawlerevent.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4)); // status id 4 = imported

		crawlereventService.saveCrawlerevent(crawlerevent);

		mav.setViewName("adapter/listCrawlereventsDashboard.jsp");
		return mav;
	}
	
//	Batch-normalize the date-like fields (producedate, disagreedate,
//	detectannouncedate, detectdate, purchasedate, foodbatch) of up to
//	`quantity` crawlerevents whose status is 1 (not yet imported).
	@RequestMapping(value = "/batchUpdateCrawlereventDate/{quantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView batchUpdateCrawlereventDate(@PathVariable Integer quantity) throws ParseException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException  {
		ModelAndView mav = new ModelAndView();

		for(int i = 0; i< quantity; i++) {
			// Fetch one status-1 event per iteration; saving it below moves it
			// to status 4, so it is not picked up again next time around.
			Crawlerevent crawlerevent = CmiSetUtil.getLastInstance(crawlereventDAO.findCrawlereventsByCrawlerdetectstatus(1, -1, 1));

			if(crawlerevent == null) {
				System.out.println("已经不存在符合条件的crawlerevent");
				break;
			}
			System.out.println("准备修改的crawlerdetect的id是：" + crawlerevent.getId());

			// Only values longer than 6 characters are reformatted
			// (e.g. a bare "202011" is left untouched).
			String rawProducedate = crawlerevent.getProducedate();
			if(rawProducedate != null && rawProducedate.length() > 6) {
				crawlerevent.setProducedate(CmiTimeUtil.format(rawProducedate));
			}

			String rawDisagreedate = crawlerevent.getDisagreedate();
			if(rawDisagreedate != null && rawDisagreedate.length() > 6) {
				crawlerevent.setDisagreedate(CmiTimeUtil.format(rawDisagreedate));
			}

			String rawDetectannouncedate = crawlerevent.getDetectannouncedate();
			if(rawDetectannouncedate != null && rawDetectannouncedate.length() > 6) {
				crawlerevent.setDetectannouncedate(CmiTimeUtil.format(rawDetectannouncedate));
			}

			String rawDetectdate = crawlerevent.getDetectdate();
			if(rawDetectdate != null && rawDetectdate.length() > 6) {
				crawlerevent.setDetectdate(CmiTimeUtil.format(rawDetectdate));
			}

			String rawPurchasedate = crawlerevent.getPurchasedate();
			if(rawPurchasedate != null && rawPurchasedate.length() > 6) {
				crawlerevent.setPurchasedate(CmiTimeUtil.format(rawPurchasedate));
			}

			String rawFoodbatch = crawlerevent.getFoodbatch();
			if(rawFoodbatch != null && rawFoodbatch.length() > 6) {
				crawlerevent.setFoodbatch(CmiTimeUtil.format(rawFoodbatch));
			}

			crawlerevent.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4)); // status id 4 = imported
			crawlereventService.saveCrawlerevent(crawlerevent);
		}

		// FIX: the original view name was "redirect: /ema/..." — the space after
		// "redirect:" prevents Spring's view resolver from recognizing the
		// redirect prefix, so it would try to render a literal view instead.
		mav.setViewName("redirect:/ema/batchCrawlerfilesDashboard");
		return mav;
	}

//	Pick a crawlerevent (status id 1 = not yet imported) that no other request
//	is currently importing, registering it in the shared in-progress set.
//	Returns null when the DAO yields no candidate.
	private Crawlerevent getImportingCrawlerevent() {
		Random random = new Random();
		while (true) {
			// Probe a random offset to reduce contention between concurrent callers.
			int offset = random.nextInt(100);
			Crawlerevent crawlerevent = CmiSetUtil.getSoleInstance(crawlereventDAO.findCrawlereventsByCrawlerdetectstatus(1, offset, 1));
			// FIX: the original added null to the shared set before returning it;
			// on any later null lookup, contains(null) became true and the
			// while(true) loop spun forever.
			if (crawlerevent == null) {
				return null;
			}
			if (!importingCrawlerdetectSet.contains(crawlerevent)) {
				// NOTE(review): importingCrawlerdetectSet is a static HashSet used
				// without synchronization; confirm single-threaded access or switch
				// to a concurrent set.
				importingCrawlerdetectSet.add(crawlerevent);
				return crawlerevent;
			}
		}
	}
}