package cn.edu.scau.cmi.crawler.saver;

import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import cn.edu.scau.cmi.crawler.wrapper.CrawlerdetectWrapper;
import cn.edu.scau.cmi.ema.dao.CrawlerdetectstatusDAO;
import cn.edu.scau.cmi.ema.domain.Crawlerdetect;
import cn.edu.scau.cmi.ema.domain.Crawlerfile;
import cn.edu.scau.cmi.ema.service.CrawlerdetectService;
import cn.edu.scau.cmi.ema.service.CrawlereventService;
import cn.edu.scau.cmi.ema.service.base.EntityFactoryService;

// TODO: later, split this into one ImportService per crawl site.
@Service("crawlerdetectSaver")
// NOTE(review): @Transactional timeout is measured in SECONDS. 300000000 s is
// roughly 9.5 years, which effectively disables the timeout. 300 (5 minutes)
// was probably intended — confirm before changing, long imports may rely on it.
@Transactional(timeout = 300000000)
public class CrawlerdetectSaverImpl implements CrawlerdetectSaver {

	/** Marker preceding the max-column code in "Event*" standard names, e.g. "EventJJ0". */
	private static final String EVENT_MARKER = "Event";
	/** Marker preceding the max-column code in "Detect*" standard names, e.g. "DetectJJ0". */
	private static final String DETECT_MARKER = "Detect";
	/** Length of the max-column code that follows the marker (e.g. "JJ"). */
	private static final int MAXCOLUMN_LENGTH = 2;
	/** Crawlerdetectstatus id stamped on imported rows — presumably "imported OK"; verify against the status table. */
	private static final int STATUS_IMPORTED = 4;

	@Autowired EntityFactoryService entityFactory;
	@Autowired CrawlereventService crawlereventService;
	@Autowired CrawlerdetectService crawlerdetectService;

	@Autowired CrawlerdetectWrapper crawlerdetectWrapper;

	@Autowired CrawlerdetectstatusDAO crawlerdetectstatusDAO;

	/**
	 * Wraps one Excel row into a {@code Crawlerdetect} entity, stamps it with
	 * import metadata (status, source file name, 1-based row number, max-column
	 * code) and persists it via {@code crawlerdetectService}.
	 *
	 * @param crawlerfile the source file the row was read from
	 * @param sheet       the sheet containing the row
	 * @param row         the POI row being imported (POI row numbers are 0-based)
	 */
	@Override
	public void saveActualDetectRowIntoCrawlerdetect(Crawlerfile crawlerfile, Sheet sheet, Row row) {
		Crawlerdetect crawlerdetect = crawlerdetectWrapper.wrapCrawlerdetect(crawlerfile, sheet, row);

		crawlerdetect.setCrawlerdetectstatus(crawlerdetectstatusDAO.findCrawlerdetectstatusById(STATUS_IMPORTED));
		crawlerdetect.setFilename(crawlerfile.getName());
		// POI rows are 0-based; store the 1-based row number users see in Excel.
		crawlerdetect.setRownumber(String.valueOf(row.getRowNum() + 1));
		crawlerdetect.setMaxcolumn(getMaxcolumn(crawlerfile));
		crawlerdetect.setDescription(crawlerdetect.getDescription() + "数据导入成功");

		crawlerdetect = crawlerdetectService.saveCrawlerdetect(crawlerdetect);
		System.out.println("******" + crawlerfile.getName() + "\n******第 " + row.getRowNum() + "行保存成功");
		System.out.println("******" + "导入的crawlerdetect数据的id是" + crawlerdetect.getId());
	}

	/**
	 * Extracts the two-character max-column code that follows the "Event" or
	 * "Detect" marker in the file standard's name (e.g. "DetectJJ0" -> "JJ",
	 * "EventJJ0" -> "JJ").
	 *
	 * <p>Bug fix: the original tested {@code indexOf("Event") > 0}, so a name
	 * starting with "Event" (index 0) fell through to the Detect branch and was
	 * only correct by the numeric coincidence that {@code -1 + "Detect".length()}
	 * equals {@code "Event".length()}. A name containing neither marker produced
	 * junk or a StringIndexOutOfBoundsException; it now fails with a clear error.
	 *
	 * @param crawlerfile file whose standard name is parsed
	 * @return the two-character max-column code
	 * @throws IllegalArgumentException if the standard name contains neither marker
	 */
	private String getMaxcolumn(Crawlerfile crawlerfile) {
		// Standard names look like "DetectJJ0" or "EventJJ0".
		String standardName = crawlerfile.getCrawlerfilestandard().getName();

		int indexEvent = standardName.indexOf(EVENT_MARKER);
		if (indexEvent >= 0) {
			int start = indexEvent + EVENT_MARKER.length();
			String max = standardName.substring(start, start + MAXCOLUMN_LENGTH);
			System.out.println(max);
			return max;
		}

		int indexDetect = standardName.indexOf(DETECT_MARKER);
		if (indexDetect >= 0) {
			int start = indexDetect + DETECT_MARKER.length();
			String max = standardName.substring(start, start + MAXCOLUMN_LENGTH);
			System.out.println(max);
			return max;
		}

		throw new IllegalArgumentException(
				"Crawlerfilestandard name contains neither \"Event\" nor \"Detect\": " + standardName);
	}
}