package priv.lirenhe.dissertation.component.crawler_robot.crawler_robot_executor;

import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import priv.lirenhe.dissertation.cacher.Cacher;
import priv.lirenhe.dissertation.cacher.CacherManager;
import priv.lirenhe.dissertation.component.crawler_robot.crawler_actions.crawler_actions_executor.CrawlerActionsExecutor;
import priv.lirenhe.dissertation.component.crawler_robot.url_manager.url_manager_executor.UrlManagerExecutor;

/**
 * Drives the crawl with a fixed pool of worker threads. Each worker repeatedly
 * pulls a URL from the {@code UrlManagerExecutor}, fetches the page content via
 * {@code CrawlerActionsExecutor}, and stores the result in the shared
 * {@code CacherManager}, backing off while the cache is full.
 */
public class CrawlerRobotExecutor {

	/** Number of crawler worker threads in the fixed pool. */
	private static final int THREAD_NUMBER = 2;

	/** Cache capacity threshold; workers back off while the cache is at or above this size. */
	private static final int CACHE_LIMIT = 3;

	/** Pause (ms) before re-checking when the cache is full. */
	private static final long CACHE_FULL_SLEEP_MS = 2000L;

	/** Pause (ms) before polling again when the URL manager returns no URL. */
	private static final long EMPTY_URL_SLEEP_MS = 5000L;

	// Must be carried out with multiple threads.
	/**
	 * Starts {@link #THREAD_NUMBER} worker threads, then blocks until the pool
	 * terminates. Uses {@code awaitTermination} rather than a busy sleep/poll
	 * loop, and restores the interrupt flag if the wait is interrupted.
	 */
	public void execute(){
		
		System.out.println("CrawlerRobotExecutor execute--");
		
		final CrawlerActionsExecutor crawlerActionsExecutor = new CrawlerActionsExecutor();
		final UrlManagerExecutor urlManagerExecutor = new UrlManagerExecutor();
		
		final CacherManager cacherManager = new CacherManager();
		
		ExecutorService fixedThreadPool = Executors.newFixedThreadPool(THREAD_NUMBER);
		for(int i = 0; i < THREAD_NUMBER; i++){
			fixedThreadPool.execute(new Runnable(){
				@Override
				public void run() {
					crawlLoop(urlManagerExecutor, crawlerActionsExecutor, cacherManager);
				}
			});
		}
		fixedThreadPool.shutdown();
		try{
			// Block until all workers finish instead of busy-waiting with sleep().
			while(!fixedThreadPool.awaitTermination(1, TimeUnit.SECONDS)){
				// keep waiting
			}
		}catch(InterruptedException e){
			// Re-assert the interrupt so callers can observe the interruption.
			Thread.currentThread().interrupt();
		}
	}

	/**
	 * Worker loop: pull URL → crawl → cache, until the thread is interrupted.
	 * A failure while crawling a single URL is logged and the loop moves on to
	 * the next URL instead of killing the worker thread (the original wrapped
	 * the whole loop in one catch, so any exception terminated the worker).
	 *
	 * @param urlManagerExecutor     source of the next URL to crawl
	 * @param crawlerActionsExecutor fetches page content for a URL
	 * @param cacherManager          shared cache; assumed thread-safe — TODO confirm
	 */
	private void crawlLoop(final UrlManagerExecutor urlManagerExecutor,
			final CrawlerActionsExecutor crawlerActionsExecutor,
			final CacherManager cacherManager){
		// Counts consecutive empty polls since the last successful URL fetch.
		int emptyFlag = 0;
		while(!Thread.currentThread().isInterrupted()){
			try{
				Map<String,String> exeMap = urlManagerExecutor.execute();
				if(exeMap == null || exeMap.isEmpty()){
					// No URL available yet; back off before polling again.
					Thread.sleep(EMPTY_URL_SLEEP_MS);
					// Only log this count in the empty branch — the original also
					// printed it after every successful crawl, which was misleading.
					System.out.println(Thread.currentThread().getName() + "在返回url为空从而睡眠后循环次数"+ ++emptyFlag);
					continue;
				}
				emptyFlag = 0;
				String realUrl = exeMap.get("realUrl");
				if(realUrl == null){
					continue;
				}
				String webContent = crawlerActionsExecutor.execute(realUrl);
				
				System.out.println(Thread.currentThread().getName() + "--realUrl=" + realUrl);
				
				if(webContent == null){
					continue;
				}
				
				System.out.println(Thread.currentThread().getName() + "webContent放入缓存中");
				
				Cacher webContentCacher = new Cacher();
				webContentCacher.setDatas(webContent);
				System.out.println("cachesSize="+cacherManager.getCacherSize());
				// Back off while the cache is full, then store the page content.
				while(cacherManager.getCacherSize() >= CACHE_LIMIT){
					System.out.println(Thread.currentThread().getName() + "cacher 不小于 3 睡眠2秒！");
					Thread.sleep(CACHE_FULL_SLEEP_MS);
				}
				System.out.println(Thread.currentThread().getName() + "cacher 小于 3 放入缓存中！");
				cacherManager.putCache(realUrl, webContentCacher);
			}catch(InterruptedException ie){
				// Restore the flag and stop this worker cleanly.
				Thread.currentThread().interrupt();
				return;
			}catch(Exception e){
				// One bad URL must not kill the worker; log and keep crawling.
				e.printStackTrace();
			}
		}
	}
}

/*
Do not crawl the entire website in one pass:
for(String url:urlOfList){
	webContent = crawlerActionsExecutor.execute(url);
	realUrlAndWebContentOfMap.put(url, webContent);
}
*/

/*
//Parse webContent to extract hrefs, then invoke the crawler again.
//How should the hrefs be stored?
WebContentToUrlParser webContentToUrlParser = new WebContentToUrlParser(webContent);
List<String> urlOfList = webContentToUrlParser.getUrlOfList(realUrl);

//A queue is needed as a buffer: when a thread throws an exception while crawling
//a site, store those URLs in notUsedBDB; URLs already crawled go into usedBDB.


//After extracting the URLs, pass fatherUrl + child URL to the service, which persists them.

//Cache
*/


