package cn.edu.scau.cmi.crawler.controller;

import java.lang.reflect.InvocationTargetException;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import javax.servlet.http.HttpServletRequest;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;

import com.alibaba.fastjson.JSONObject;

import cn.edu.scau.cmi.crawler.service.CrawlerdetectImportService;
import cn.edu.scau.cmi.ema.domain.Crawlerdetect;
import cn.edu.scau.cmi.ema.domain.Crawlerfile;
import cn.edu.scau.cmi.ema.domain.base.CmiPagination;
import cn.edu.scau.cmi.ema.util.CmiSetUtil;
import cn.edu.scau.cmi.ema.util.CmiTimeUtil;
import cn.edu.scau.cmi.front.controller.FrontCmiEmaController;

/*
	爬取文件包含不合格检测、合格检测、说明，需要合理的导入。
	把crawlerDetect中的数据导入到Detect中
*/

@Controller
public class CrawlerdetectImportController extends FrontCmiEmaController{
	// Rows currently being imported, shared across requests so concurrent batch
	// runs do not pick the same crawlerdetect twice. A concurrency-safe set is
	// required: the batch endpoints are explicitly designed to run from several
	// workers at once (see batchImportDetects).
	private static Set<Crawlerdetect> importingCrawlerdetectSet = ConcurrentHashMap.newKeySet();
	@Autowired CrawlerdetectImportService crawlerdetectImportService;
	
//	(1) Import the records of a single crawled file into the CrawlerDetect table.
//	Test data: excelId 329; basic flow verified.
	@RequestMapping(value = "/importCrawlerdetect/{crawlerfileId}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView importCrawlerdetect(@PathVariable Integer crawlerfileId, @ModelAttribute CmiPagination pagination) throws Exception {
		ModelAndView mav = new ModelAndView();
		
		Crawlerfile crawlerfile = crawlerfileDAO.findCrawlerfileById(crawlerfileId);

		// Status 3 = "importing": mark the file first so other workers skip it.
		crawlerfile.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(3));
		crawlerfile = crawlerfileService.saveCrawlerfile(crawlerfile);
		
		System.out.println("准备导入的文件的Id是：" + crawlerfile.getId());
		System.out.println("\n\n准备导入的文件是：" + crawlerfile.getName());
		
		boolean isSuccess = crawlerdetectImportService.importCrawlerdetect(crawlerfile);
		// Status 4 = import succeeded, 5 = import failed.
		if(isSuccess) {
			crawlerfile.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(4));
		}else {
			crawlerfile.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(5));
		}
		crawlerfileService.saveCrawlerfile(crawlerfile);
		
		mav.addObject("pagination", pagination);
		mav.setViewName("redirect:/listCrawlerfilesDashboard");
		return mav;
	}
	
//	(2) Import every eligible crawled file into the CrawlerDetect table; meant
//	for headless/background use.
//	Note: isinserted is a bit column with three states (null, false, true).
	@ResponseBody
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/batchImportCrawlerdetects/{fileQuantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public Object batchImportCrawlerdetects(@PathVariable Integer fileQuantity) throws Exception {
		// Status 8 = "import rules confirmed"; fetch up to fileQuantity such files.
		Set<Crawlerfile> excels = crawlerfileDAO.findCrawlerfilesByCrawlerfilestatus(8, -1, fileQuantity);
		// First lock the whole batch (status 2 = claimed by this process) so a
		// concurrent run does not pick up the same files.
		for(Crawlerfile excel : excels) {
			excel.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(2));
			excel = crawlerfileService.saveCrawlerfile(excel);
		}
		
		for(Crawlerfile excel : excels) {
			excel.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(3));// 3 = importing
			excel = crawlerfileService.saveCrawlerfile(excel);
			
			System.out.println("\n\n准备导入的文件是：" + excel.getName());
			System.out.println("准备导入的文件的Id是：" + excel.getId());
			
			boolean isSuccess = crawlerdetectImportService.importCrawlerdetect(excel);
			if(isSuccess) {
				excel.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(4));// 4 = imported
			}else {
				excel.setCrawlerfilestatus(crawlerfilestatusDAO.findCrawlerfilestatusById(5));// 5 = failed
			}
			crawlerfileService.saveCrawlerfile(excel);
		}
		
		JSONObject jsonObject = new JSONObject();
		jsonObject.put("success", "success");
		return JSONObject.parse(jsonObject.toJSONString());
	}
	
//	Import one crawlerdetect row into the Detect-related tables.
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/importDetect/{crawlerdetectId}", method =  { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView importDetect(HttpServletRequest request, @PathVariable Integer crawlerdetectId,  @ModelAttribute CmiPagination pagination) throws Exception {
		ModelAndView mav = new ModelAndView();
		pagination = crawlerdetectPaginationService.updatePagination(request, pagination);
		Set<Crawlerdetect> crawlerdetects = crawlerdetectPaginationService.getPaginationCrawlerdetects(pagination);
		
		Crawlerdetect crawlerdetect = crawlerdetectDAO.findCrawlerdetectById(crawlerdetectId);
		boolean success = crawlerdetectImportService.importDetect(crawlerdetect);
		
		if(success) {
			crawlerdetect.setDescription("已经导入到了Detect和其他相关表格中");
			crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));// 4 = imported
			crawlerdetectService.saveCrawlerdetect(crawlerdetect);
		}
		// NOTE(review): a failed import leaves the row's status untouched and the
		// log line below still claims completion — confirm this is intentional.
		System.out.println(crawlerdetect.getName() + "已经导入完成!!!");
		
		mav.addObject("crawlerdetects", crawlerdetects);
		mav.addObject("entityName", "crawlerdetect");
		mav.addObject("pagination", pagination);
		mav.setViewName("redirect:/listCrawlerdetectsDashboard");
		return mav;
	}
	
//	Render the crawlerdetect dashboard page for the requested pagination window.
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/listCrawlerdetectsDashboard", method =  { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView listCrawlerdetectsDashboard(HttpServletRequest request, @ModelAttribute CmiPagination pagination) throws Exception {
		ModelAndView mav = new ModelAndView();
		pagination = crawlerdetectPaginationService.updatePagination(request, pagination);
		Set<Crawlerdetect> crawlerdetects = crawlerdetectPaginationService.getPaginationCrawlerdetects(pagination);
		
		mav.addObject("crawlerdetects", crawlerdetects);
		mav.addObject("entityName", "crawlerdetect");
		mav.addObject("pagination", pagination);
		mav.setViewName("adapter/listCrawlerdetectsDashboard.jsp");
		return mav;
	}

	
//	Batch-import crawlerdetect rows into the Detect tables. Design: the front-end
//	console assigns each server its own work, and the shared importing set plus
//	the status columns keep concurrent workers from processing the same row.
	@ResponseBody
	@Transactional(timeout = 300000000)
	@RequestMapping(value = "/batchImportDetects/{detectQuantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public Object batchImportDetects(@PathVariable Integer detectQuantity) throws Exception {
		for(int i = 0; i < detectQuantity; i++) {
			// Status 1 = not yet imported; take the last matching row.
			Crawlerdetect crawlerdetect = CmiSetUtil.getLastInstance(crawlerdetectDAO.findCrawlerdetectsByCrawlerdetectstatus(1, -1, -1));
			
			if(crawlerdetect == null) {
				continue;
			}
			// Set.add returns false when the row is already claimed by another
			// worker — atomic check-and-claim on the concurrent set (the original
			// contains()-then-add() pair was racy).
			if(!importingCrawlerdetectSet.add(crawlerdetect)) {
				continue;
			}

			System.out.println("");
			System.out.println("*********************************************************************");
			System.out.println("准备导入Detec的CrawlerDetect的id是\n" + crawlerdetect.getId());
			
			boolean success = crawlerdetectImportService.importDetect(crawlerdetect);
			if(success) {
				crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));// 4 = imported
			}else {
				crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(5));// 5 = failed
				// Fixed: the error note used to be written unconditionally, so even
				// successfully imported rows were flagged as faulty.
				crawlerdetect.setDescription("导入这一行数据有错误，请查看。" + crawlerdetect.getDescription());
			}
			crawlerdetectService.saveCrawlerdetect(crawlerdetect);
				
			importingCrawlerdetectSet.remove(crawlerdetect);
		}
		
		JSONObject jsonObject = new JSONObject();
		jsonObject.put("success", "success");
		return JSONObject.parse(jsonObject.toJSONString());
	}
	
//	Claim a crawlerdetect that is not currently being imported. A random offset
//	reduces contention when several workers poll at the same time. Returns null
//	when the DAO has no matching row (the original looped on and could claim and
//	return null).
	private Crawlerdetect getImportingCrawlerdetect() {
		Random random = new Random();
		while(true) {
			int offset = random.nextInt(100);
			Crawlerdetect crawlerdetect = CmiSetUtil.getSoleInstance(crawlerdetectDAO.findCrawlerdetectsByCrawlerdetectstatus(1, offset, 1));
			if(crawlerdetect == null) {
				return null;
			}
			// add() is atomic on the concurrent set: true means we claimed the row.
			if(importingCrawlerdetectSet.add(crawlerdetect)) {
				return crawlerdetect;
			}
		}
	}

//	Normalize the date fields of one crawlerdetect: producedate, disagreedate,
//	detectannouncedate, detectdate, purchasedate, foodbatch.
//	Fixed: the original wrote the formatted disagreedate and purchasedate back
//	into producedate (copy-paste bug); the shared helper uses the matching
//	setter for every field, consistent with the batch endpoint.
	@RequestMapping(value = "/updateCrawlerdetectDate/{crawlerdetectId}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView updateCrawlerdetectDate(@PathVariable Integer crawlerdetectId) throws ParseException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException  {
		ModelAndView mav = new ModelAndView();
		
		Crawlerdetect crawlerdetect = crawlerdetectDAO.findCrawlerdetectById(crawlerdetectId);
		
		formatCrawlerdetectDates(crawlerdetect);
		
		crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));// 4 = imported
		crawlerdetectService.saveCrawlerdetect(crawlerdetect);

		mav.setViewName("adapter/listCrawlerdetectsDashboard.jsp");
		return mav;
	}
	
//	Batch variant of updateCrawlerdetectDate: normalize the date fields of up to
//	{quantity} not-yet-imported crawlerdetects (status 1).
	@RequestMapping(value = "/batchUpdateCrawlerdetectDate/{quantity}", method = { RequestMethod.POST, RequestMethod.GET })
	public ModelAndView batchUpdateCrawlerdetectDate(@PathVariable Integer quantity) throws ParseException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException  {
		ModelAndView mav = new ModelAndView();
		
		for(int i = 0; i < quantity; i++) {
			Crawlerdetect crawlerdetect = CmiSetUtil.getLastInstance(crawlerdetectDAO.findCrawlerdetectsByCrawlerdetectstatus(1, -1, 1));
			
			if(crawlerdetect == null) {
				System.out.println("已经不存在符合条件的crawlerdetect");
				break;
			}
			
			System.out.println("准备修改的crawlerdetect的id是：" + crawlerdetect.getId());
			formatCrawlerdetectDates(crawlerdetect);
			
			crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(4));// 4 = imported
			crawlerdetectService.saveCrawlerdetect(crawlerdetect);
		}

		// Fixed: the original view name was "redirect: /ema/..." — the space after
		// the redirect: prefix breaks Spring's redirect view resolution.
		mav.setViewName("redirect:/ema/batchCrawlerfilesDashboard");
		
		return mav;
	}
	
//	Reformat every date-like field whose raw value is longer than a plain
//	"yyyyMM" string (length > 6) via CmiTimeUtil.format, writing each result
//	back to its own field. Shared by updateCrawlerdetectDate and
//	batchUpdateCrawlerdetectDate.
	private void formatCrawlerdetectDates(Crawlerdetect crawlerdetect) throws ParseException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
		String producedate = crawlerdetect.getProducedate();
		if(producedate != null && producedate.length() > 6) {
			crawlerdetect.setProducedate(CmiTimeUtil.format(producedate));
		}
		
		String disagreedate = crawlerdetect.getDisagreedate();
		if(disagreedate != null && disagreedate.length() > 6) {
			crawlerdetect.setDisagreedate(CmiTimeUtil.format(disagreedate));
		}
		
		String detectannouncedate = crawlerdetect.getDetectannouncedate();
		if(detectannouncedate != null && detectannouncedate.length() > 6) {
			crawlerdetect.setDetectannouncedate(CmiTimeUtil.format(detectannouncedate));
		}
		
		String detectdate = crawlerdetect.getDetectdate();
		if(detectdate != null && detectdate.length() > 6) {
			crawlerdetect.setDetectdate(CmiTimeUtil.format(detectdate));
		}
		
		String purchasedate = crawlerdetect.getPurchasedate();
		if(purchasedate != null && purchasedate.length() > 6) {
			crawlerdetect.setPurchasedate(CmiTimeUtil.format(purchasedate));
		}
		
		String foodbatch = crawlerdetect.getFoodbatch();
		if(foodbatch != null && foodbatch.length() > 6) {
			crawlerdetect.setFoodbatch(CmiTimeUtil.format(foodbatch));
		}
	}
	
}