package com.cmge.ad.service.impl;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import us.codecraft.webmagic.Spider;

import com.cmge.ad.mapper.CrawlMapper;
import com.cmge.ad.model.AlbumStore;
import com.cmge.ad.model.ArticleStore;
import com.cmge.ad.model.Crawl;
import com.cmge.ad.model.LockPaint;
import com.cmge.ad.model.PaintReq;
import com.cmge.ad.model.PaintResp;
import com.cmge.ad.model.PictureStore;
import com.cmge.ad.service.CrawlService;
import com.cmge.ad.spider.AlbumListPageProcessor;
import com.cmge.ad.spider.ArticleListPageProcessor;
import com.cmge.ad.spider.PorcessorManager;
import com.cmge.ad.util.Constant;
import com.cmge.ad.util.Pager;
import com.cmge.ad.util.StringUtils;
import com.cmge.ad.util.SuprUtil;

@Service
@Transactional(isolation=Isolation.DEFAULT,rollbackFor=Exception.class)
public class CrawlServiceImpl implements CrawlService{

	/** Crawl type code for article (joke) sources — see {@code Crawl#getType()}. */
	private static final int TYPE_ARTICLE = 1;
	/** Crawl type code for photo-album sources. */
	private static final int TYPE_ALBUM = 2;

	@Autowired
	private CrawlMapper crawlMapper;

	/** Persists a new crawl configuration. */
	@Override
	public void addCrawl(Crawl crawl) {
		crawlMapper.addCrawl(crawl);
	}

	/** Persists the album-specific part of a crawl configuration. */
	@Override
	public void addAlbumCrawlInfo(Crawl crawl) {
		crawlMapper.addAlbumCrawlInfo(crawl);
	}

	/** Updates an existing crawl configuration. */
	@Override
	public void updateCrawlInfo(Crawl crawl) {
		crawlMapper.updateCrawlInfo(crawl);
	}

	/**
	 * Marks the crawl task as started in the database.
	 * (Method name misspelling "starCral" is fixed in the {@link CrawlService}
	 * interface's namespace and is preserved here for compatibility.)
	 */
	@Override
	public void starCralTask(Crawl crawl) {
		crawlMapper.startCralTask(crawl);
	}

	/**
	 * Runs every crawl configuration flagged for automatic crawling.
	 * Best run one task at a time: multiple threads inserting into the store
	 * tables concurrently may trigger lock-wait errors.
	 */
	@Override
	public void autoCrawlArticle() {
		List<Crawl> crawlList = crawlMapper.getAutoCrawlList();
		if(!SuprUtil.isEmptyCollection(crawlList)){
			for(Crawl crawl : crawlList){
				runCrawlTask(crawl);
			}
		}
	}

	/**
	 * Loads a crawl configuration by id and runs it.
	 * Delegates to {@link #runCrawlTask(Crawl)} instead of duplicating its
	 * type-dispatch logic (the original repeated the same if/else here).
	 */
	@Override
	public void starCralTaskById(String id) {
		runCrawlTask(scanCrawlTaskById(id));
	}

	/**
	 * Dispatches a crawl task by its type code: article sources go through the
	 * article pipeline, album sources through the album pipeline.
	 * Unknown type codes are silently ignored (matches original behavior).
	 */
	@Override
	public void runCrawlTask(Crawl crawl) {
		if(crawl.getType() == TYPE_ARTICLE){
			starCralTask(crawl);
			runArticleCrawlTask(crawl);
		}else if(crawl.getType() == TYPE_ALBUM){
			starCralTask(crawl);
			runAlbumCrawlTask(crawl);
		}
	}

	/** Looks up a single crawl configuration by its id. */
	@Override
	public Crawl scanCrawlTaskById(String id) {
		return crawlMapper.scanCrawlTaskById(id);
	}

	/**
	 * Configures and runs the WebMagic spider for an article (joke) source,
	 * then marks the task stopped when the crawl finishes.
	 * <p>
	 * Runs outside any transaction so the long-lived crawl does not hold a
	 * database connection/transaction open.
	 * NOTE(review): {@code readOnly=true} is dubious here — the trailing
	 * {@code stopCralTask} is a write; with NOT_SUPPORTED it executes
	 * non-transactionally so it works, but the flag is misleading. Confirm.
	 */
	@Override
	@Transactional(propagation=Propagation.NOT_SUPPORTED,readOnly=true)
	public void runArticleCrawlTask(Crawl crawl) {
		ArticleListPageProcessor processor = new ArticleListPageProcessor()
				.setId(String.valueOf(crawl.getId()))
				.setStart(crawl.getStart()+1)
				.setEnd(crawl.getEnd())
				.setSleepTime(crawl.getSleepTime())
				// "$1" is the page-number placeholder in the list-URL template.
				.setListUrl(crawl.getListUrl().replace("$1", "\\w+"))
				.setInfoUrl(crawl.getInfoUrlRegular()+"\\w+")
				.setListUrlTemplate(crawl.getListUrl())
				.setInfoUrlPre(crawl.getInfoUrlPre())
				.setInfoUrlXpath(crawl.getInfoUrlXpath())
				.setInfoContentXpath(crawl.getInfoContentXpath())
				.setInfoUrlRegular(crawl.getInfoUrlRegular())
				.setListUrlRegular(crawl.getListUrlRegular())
				.setInfoPicXpath(crawl.getInfoPicXpath())
				.setRepeatFlagXpath(crawl.getRepeatFlagXpath())
				.setRepeatMaxTimes(crawl.getRepeatMaxTimes())
				.setSiteId(crawl.getSiteId())
				.setSiteCategoryId(crawl.getSiteCategoryId());

		Spider spider = Spider.create(processor)
		.addUrl(crawl.getListUrl().replace("$1", String.valueOf(crawl.getStart())))
		.thread(1);

		processor.setSpider(spider);

		// Register so the running processor can be looked up / stopped externally.
		PorcessorManager.addArticleProcessor(processor);

		// NOTE(review): original comment said "start in a thread to avoid table
		// lock waits", but run() is synchronous. The blocking call is kept
		// because stopCralTask below must run after the crawl completes.
		spider.run();

		// Crawl finished — flip the task status back to stopped.
		crawlMapper.stopCralTask(crawl);
	}

	/**
	 * Configures and runs the WebMagic spider for a photo-album source,
	 * then marks the task stopped when the crawl finishes.
	 * Same transactional caveats as {@link #runArticleCrawlTask(Crawl)}.
	 */
	@Override
	@Transactional(propagation=Propagation.NOT_SUPPORTED,readOnly=true)
	public void runAlbumCrawlTask(Crawl crawl) {
		AlbumListPageProcessor processor = new AlbumListPageProcessor()
				.setId(String.valueOf(crawl.getId()))
				.setStart(crawl.getStart()+1)
				.setEnd(crawl.getEnd())
				.setSleepTime(crawl.getSleepTime())
				// "$1" is the page-number placeholder in the list-URL template.
				.setListUrl(crawl.getListUrl().replace("$1", "\\w+"))
				.setInfoUrl(crawl.getInfoUrlRegular()+"\\w+")
				.setListUrlTemplate(crawl.getListUrl())
				.setInfoUrlPre(crawl.getInfoUrlPre())
				.setInfoUrlXpath(crawl.getInfoUrlXpath())
				.setInfoContentXpath(crawl.getInfoContentXpath())
				.setInfoUrlRegular(crawl.getInfoUrlRegular())
				.setListUrlRegular(crawl.getListUrlRegular())
				.setInfoPicXpath(crawl.getInfoPicXpath())
				.setRepeatFlagXpath(crawl.getRepeatFlagXpath())
				.setRepeatMaxTimes(crawl.getRepeatMaxTimes())
				.setSiteId(crawl.getSiteId())
				// Album-only fields: pagination inside an album and id extraction.
				.setNextUrlPre(crawl.getNextUrlPre())
				.setAlbumIdSign(crawl.getAlbumIdSign())
				.setInfoListNextXpath(crawl.getInfoListNextXpath())
				.setInfoFirstRegular(crawl.getInfoFirstRegular())
				.setTitleXpath(crawl.getTitleXpath())
				.setSiteCategoryId(crawl.getSiteCategoryId());

		Spider spider = Spider.create(processor)
		.addUrl(crawl.getListUrl().replace("$1", String.valueOf(crawl.getStart())))
		.thread(1);

		processor.setSpider(spider);

		// Register so the running processor can be looked up / stopped externally.
		PorcessorManager.addAlbumProcessor(processor);

		// Synchronous crawl; stopCralTask must run after completion (see
		// runArticleCrawlTask for the threading caveat).
		spider.run();

		// Crawl finished — flip the task status back to stopped.
		crawlMapper.stopCralTask(crawl);
	}

	/** Returns the next pending crawl task, or {@code null} if none. */
	@Override
	public Crawl scanCrawlTask() {
		return crawlMapper.scanCrawlTask();
	}

	/** Stores one crawled article. */
	@Override
	public void addArticleStore(ArticleStore articleStore) {
		crawlMapper.addArticleStore(articleStore);
	}

	/** @return {@code true} if an article with this unique id was already stored. */
	@Override
	public boolean isExistArticleUniqueId(String uniqueId) {
		return crawlMapper.isExistArticleUniqueId(uniqueId) > 0;
	}

	/** @return {@code true} if an album with this unique id was already stored. */
	@Override
	public boolean isExistAlbumUniqueId(String uniqueId) {
		return crawlMapper.isExistAlbumUniqueId(uniqueId) > 0;
	}

	/**
	 * Fills {@code pager} with a page of crawl configurations matching
	 * {@code paramMap}. Skips the list query entirely when the count is zero.
	 * Side effect: adds "start"/"limit" keys to {@code paramMap}.
	 */
	@Override
	public void getCrawlList(Pager<Crawl> pager, HashMap<String, Object> paramMap) {
		int count = crawlMapper.getCrawlCount(paramMap);
		pager.setTotalCount(count);
		if(count > 0){
			paramMap.put("start", pager.getStart());
			paramMap.put("limit", pager.getLimit());
			List<Crawl> appList = crawlMapper.getCrawlList(paramMap);
			pager.setRows(appList);
		}
	}

	/** Stores one crawled album. */
	@Override
	public void addAlbumStore(AlbumStore albumStore) {
		crawlMapper.addAlbumStore(albumStore);
	}

	/** Stores one crawled picture. */
	@Override
	public void addPictureStore(PictureStore ps) {
		crawlMapper.addPictureStore(ps);
	}

	/** Persists a lock-screen paint record. */
	@Override
	public void savePaint(LockPaint paint) {
		crawlMapper.savePaint(paint);
	}

	/**
	 * Queries paint records matching the request and wraps them in a
	 * success response.
	 */
	@Override
	public PaintResp getPaintList(PaintReq getReq) {
		PaintResp resp = new PaintResp(Constant.DEAL_SUCCESS);
		List<LockPaint> paintList = crawlMapper.getPaintList(getReq);
		resp.setPaintList(paintList);
		return resp;
	}

}
