package tech.abing.spider.task.crawler;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import tech.abing.spider.CrawTaskBuilder;
import tech.abing.spider.CrawlManager;
import tech.abing.spider.CrawlTask;
import tech.abing.spider.component.DynamicEntrance;
import tech.abing.spider.component.listener.TaskLifeListener;
import tech.abing.spider.constants.FinalConstants;
import tech.abing.spider.downloader.impl.DefaultPageDownloader;
import tech.abing.spider.request.PageRequest;
import tech.abing.spider.request.PageRequest.PageEncoding;
import tech.abing.spider.request.StartContext;
import tech.abing.spider.task.model.TaskTrade;
import tech.abing.spider.task.pageprocessor.PayLogProccessor1;
import tech.abing.spider.task.pipeline.PayLogPipeline;
import tech.abing.spider.task.proxy.MyProxyIpPool;
import tech.abing.spider.util.DateUtils;
import tech.abing.spider.util.RedisUtil;
import tech.abing.spider.util.SysUtil;

/**
 * Spider that crawls Taobao trade (pay-log) records for product ids queued in redis.
 *
 * <p>Task flow: {@link RedisDynamicEntranceImpl} pops {@code TaskTrade} entries from a set of
 * redis queues (hot / normal / unsale), and for each product that is due for re-crawling injects
 * a page request processed by {@code PayLogProccessor1} and persisted by {@code PayLogPipeline}.
 * Cookies are borrowed from a redis queue at start and pushed back when the task finishes.
 *
 * <p>NOTE(review): {@code redisUtil} and {@code cookieList} are static mutable state shared by
 * the task lifecycle listener and the entrance; this assumes entrance loading is effectively
 * single-threaded — confirm before raising {@code useThread(...)} expectations on these fields.
 */
public class PayLogSpider {

	private static Logger logger = Logger.getLogger(PayLogSpider.class);

	// Shared redis pool: created lazily in onStart, destroyed in onFinished.
	private static RedisUtil redisUtil = null;

	// Cookies borrowed from redis for this crawl run; returned to the queue on finish.
	private static List<String> cookieList = null;

	/** Crawls with 1 thread, 3 retries per page and a 4000 ms politeness delay. */
	public void crawl(){
		this.crawl(1, 3, 4000);
	}

	/**
	 * Crawls with 1 thread and 3 retries per page.
	 *
	 * @param politenessDelay delay between requests, in milliseconds
	 */
	public void crawl(int politenessDelay){
		this.crawl(1, 3, politenessDelay);
	}

	/**
	 * Builds and starts the pay-log crawl task.
	 *
	 * @param threads         number of worker threads
	 * @param retryCount      retry count for a failed page download
	 * @param politenessDelay delay between requests, in milliseconds
	 */
	public void crawl(int threads, int retryCount, int politenessDelay){

		CrawTaskBuilder builder = CrawlManager.getInstance()
				.prepareCrawlTask("根据产品ID获取交易记录", DefaultPageDownloader.class)
				.useThread(threads)
				.useDynamicEntrance(RedisDynamicEntranceImpl.class)
				.usePageRetryCount(retryCount)
				.setPolitenessDelay(politenessDelay)
				.usePipeline(PayLogPipeline.class)
				// 10 proxy ips per fetch; an ip expires after 30 minutes; valid ips have no use limit
				.useProxyIpPool(MyProxyIpPool.class, 10, 1000 * 60 * 30, -1)
				.useTaskLifeListener(new TaskLifeListener() {

					@Override
					public void onStart(CrawlTask task) {
						// Lazily create the shared redis pool and the cookie cache.
						if(redisUtil == null){
							redisUtil = RedisUtil.getInstance();
							logger.info("Redis连接池创建成功！");
						}

						if(cookieList == null){
							cookieList = new ArrayList<String>();
						}
					}

					@Override
					public void onFinished(CrawlTask task) {
						task.getPipeline().destory();

						if(redisUtil != null){

							// Return borrowed cookies to redis so the next run can reuse them.
							if(cookieList.size() > 0){

								String cookieQueue = SysUtil.getValue("redis.trade_cookies");

								for(String cookie : cookieList){
									redisUtil.lpush(cookie, cookieQueue);
								}
								logger.info("Cookie重新入队redis["+cookieQueue+"]队列中！");
							}

							redisUtil.destroy();
							logger.info("Redis连接池关闭成功！");
						}
					}
				})
				.usePageEncoding(PageEncoding.UTF8);

		CrawlTask spider = builder.build();

		// When proxies are in use a download timeout is recommended:
		//spider.getDownloader().setTimeout(3);

		CrawlManager.getInstance().start(spider);
	}

	/**
	 * Dynamic entrance that feeds the crawler from redis task queues, honouring a per-queue
	 * minimum interval (in hours) between two crawls of the same product.
	 */
	public static final class RedisDynamicEntranceImpl extends DynamicEntrance {

		/**
		 * Pops one task from {@code disQueue} and, if the product is due for crawling
		 * (now >= last catch time + {@code diffHours}), injects a seed request into the context.
		 * Tasks that are not due — or whose catch date cannot be parsed — are pushed back to
		 * the head of the queue so they are not lost.
		 *
		 * @param context   start context receiving the seed request
		 * @param disQueue  redis queue name to pop a {@code TaskTrade} from
		 * @param diffHours minimum hours between two crawls of the same product
		 * @return true when a seed request was injected, false otherwise
		 */
		public boolean putReq(StartContext context, String disQueue, int diffHours){

			int leaveNum = redisUtil.size(disQueue);
			logger.info("=========>队列["+disQueue+"]中任务数量："+leaveNum);

			if(leaveNum == 0){
				logger.info("队列["+disQueue+"]中没有剩余任务！！");
				return false;
			}

			TaskTrade taskTrade = null;
			try {
				taskTrade = (TaskTrade) redisUtil.bpop(disQueue);

				try {
					Date catchDate = DateUtils.parseDate(taskTrade.getCatchDate(), new String[]{"yyyy-MM-dd HH:mm:ss"});
					catchDate = DateUtils.addHours(catchDate, diffHours);

					if(new Date().after(catchDate)){// now >= last catch time + interval hours
						String prodUrl = "https://item.taobao.com/item.htm?id="+taskTrade.getProdId();
						logger.info("=========>产品："+prodUrl);

						PageRequest req = context.createPageRequest(prodUrl, PayLogProccessor1.class, 0);
						req.putParams("itemId", taskTrade.getProdId());
						req.putParams("shopId", taskTrade.getShopId());
						req.putParams("type", taskTrade.getShopType());
						req.putParams("catchDate", taskTrade.getCatchDate());
						req.putParams("lastDate", taskTrade.getLastCatch());
						req.putParams("monthSale", taskTrade.getMonthSale());

						context.injectSeed(req);
						return true;
					}else{
						// Not due yet: put the task back at the head of the queue.
						requeue(taskTrade, disQueue);
						return false;
					}

				} catch (ParseException e) {
					// bpop already removed the task; re-queue it so a bad date does not lose it.
					logger.error("任务["+taskTrade+"]采集时间解析失败，重新入队["+disQueue+"]", e);
					requeue(taskTrade, disQueue);
				}
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();// preserve interrupt status for the caller
				logger.error("从队列["+disQueue+"]获取任务时被中断！", e);
			}
			return false;
		}

		/** Pushes a task back to the head of its queue; a failure is logged but not rethrown. */
		private void requeue(TaskTrade taskTrade, String disQueue){
			try {
				redisUtil.rpush(taskTrade, disQueue);
				logger.info("产品ID["+taskTrade+"]重新入队对头["+disQueue+"]成功");
			} catch (Exception e) {
				logger.error("产品ID["+taskTrade+"]重新入队对头["+disQueue+"]失败", e);
			}
		}

		/**
		 * Loads one start context: picks a cookie (from redis when available, else the
		 * configured default), then pulls due tasks from the hot/normal/unsale queues,
		 * retrying up to 5 times with a wait when all queues are empty.
		 */
		@Override
		public List<StartContext> loadStartContext() {

			StartContext context = new StartContext();

			// Queue name -> minimum re-crawl interval in hours; LinkedHashMap keeps priority order.
			LinkedHashMap<String, Integer> disQueues = new LinkedHashMap<String, Integer>();

			disQueues.put(SysUtil.getValue("redis.task_trade_product_id_hot"), FinalConstants.DIFF_HOURS_HOT);
			disQueues.put(SysUtil.getValue("redis.task_trade_product_id_normal"), FinalConstants.DIFF_HOURS_NORMAL);
			disQueues.put(SysUtil.getValue("redis.task_trade_product_id_unsale"), FinalConstants.DIFF_HOURS_UNSALE);

			String cookieQuene = SysUtil.getValue("redis.trade_cookies");
			String cookie = SysUtil.getValue("trade.cookie");// default cookie

			// Fill the local cookie cache from redis once per run.
			if(cookieList.size() == 0){
				for(int i = 0; i < FinalConstants.TRADE_COOKIE_NUM; i++){
					String redisCookie = redisUtil.rpopStr(cookieQuene);

					if(StringUtils.isNotBlank(redisCookie)){
						cookieList.add(redisCookie);
						logger.info("从redis["+cookieQuene+"]队列中获取Cookie:"+ redisCookie);
					}
				}
			}

			// Pick a random dynamic cookie when any are available.
			if(cookieList.size() > 0){

				int index = (int)(Math.random() * cookieList.size());
				cookie = cookieList.get(index);
			}

			context.putContextAttribute("cookie", cookie);
			context.putContextAttribute("redisUtil", redisUtil);

			getTask(context, disQueues);

			// No task found: wait and re-check up to 5 times before giving up this round.
			for(int i = 0; i < 5; i++){
				if(context.isEmpty()){

					logger.info("队列中没有剩余任务,等待"+FinalConstants.WAIT_TASK_SECONDS+"秒确认...");
					try {
						TimeUnit.SECONDS.sleep(FinalConstants.WAIT_TASK_SECONDS);

						getTask(context, disQueues);
					} catch (InterruptedException e) {
						Thread.currentThread().interrupt();// preserve interrupt status
						logger.error("队列中没有剩余任务，等待时被中断！", e);
						break;// interrupted: stop waiting instead of sleeping again
					}
				}else{
					break;
				}
			}

			return Arrays.asList(context);
		}

		/**
		 * Tries up to {@code REDIS_GET_PRODUCT_ID_FOR_TRADE} times to inject a seed, scanning
		 * the queues in priority order and stopping each pass at the first queue that yields one.
		 */
		private void getTask(StartContext context, LinkedHashMap<String, Integer> disQueues) {
			for(int i = 0; i < FinalConstants.REDIS_GET_PRODUCT_ID_FOR_TRADE; i++){

				for(String disQueue : disQueues.keySet()){

					if(this.putReq(context, disQueue, disQueues.get(disQueue))){
						break;
					}
				}
			}
		}

		public boolean continueLoad() {
			return true;// loop forever: keep loading new start contexts
		}

	}

}
