package com.blue.crawler;

import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;

import com.blue.core.util.BeanUtil;
import com.blue.core.util.UrlUtil;
import com.blue.crawler.annotation.CrawlerConfig;
import com.blue.crawler.annotation.Parser;
import com.blue.crawler.engine.Download;
import com.blue.crawler.engine.Engine;
import com.blue.crawler.engine.FileDownload;
import com.blue.crawler.engine.ListDetailCrawl;
import com.blue.crawler.engine.ListDetailEngine;
import com.blue.crawler.util.CrawlerInvoker;

/**
 * 爬虫启动器 (crawler starter): loads crawler configurations, groups them by
 * URL host and runs each host group on a fixed-size thread pool.
 * 
 * @author zhengj
 * @since 1.0 2017年5月29日
 */
public class CrawlerStarter
{
	// Constant logger reference: made final (was missing).
	private static final Logger logger = LoggerFactory.getLogger(CrawlerStarter.class);

	/** Placeholder inside a config URL that is replaced by the page number. */
	private static final String PAGE_PLACEHOLDER = "{page}";

	private final int thread;
	private final String root;
	private final CrawlerInvoker invoker;
	private final ExecutorService executorService;
	private final ApplicationContext applicationContext;
	private final Download download;

	/**
	 * @param thread number of worker threads in the fixed pool
	 * @param root root directory handed to the {@link FileDownload} engine
	 * @param invoker invoker used to fetch pages
	 * @param applicationContext Spring context used to resolve crawler beans
	 */
	public CrawlerStarter(int thread, String root, CrawlerInvoker invoker, ApplicationContext applicationContext)
	{
		this.thread = thread;
		this.root = root;
		this.invoker = invoker;
		this.applicationContext = applicationContext;
		this.executorService = Executors.newFixedThreadPool(thread);
		this.download = new FileDownload(root);
	}

	/**
	 * Groups all crawler configs by URL host and submits one pool task per
	 * host, so different sites crawl concurrently while configs of the same
	 * site run sequentially within one task.
	 */
	public void start()
	{
		// group() always returns a (possibly empty) map now; iterating an
		// empty map is a no-op, so the old null check is gone.
		this.group().forEach((host, configs) ->
			executorService.submit(() -> configs.forEach(config -> this.startCrawler(invoker, config))));
	}

	/**
	 * Loads all crawler configs and groups them by URL host.
	 *
	 * @return host-to-configs map; empty — never {@code null} — when no config exists
	 */
	private Map<String, List<CrawlerConfig>> group()
	{
		List<CrawlerConfig> configList = Parser.getInstance().listConfig();
		logger.info("爬虫配置个数：{}", configList.size());
		// Collecting an empty stream yields an empty map, so the previous
		// "return null when empty" special case was unnecessary.
		return configList.stream().collect(Collectors.groupingBy(c -> UrlUtil.getHost(c.getUrl())));
	}

	/**
	 * Runs one crawler config: resolves the crawl bean, selects the matching
	 * engine, then crawls either a single URL or a "{page}" templated range.
	 */
	private void startCrawler(CrawlerInvoker invoker, CrawlerConfig config)
	{
		logger.info("开始爬虫：{}", config.getName());

		Engine engine = this.createEngine(invoker, this.resolveCrawl(config));
		if (engine == null)
		{
			logger.warn("找不到正确的爬虫解析器");
			return;
		}

		try
		{
			if (!config.getUrl().contains(PAGE_PLACEHOLDER))
			{
				this.startCrawler(config.getUrl(), config, engine, 1);
			}
			else
			{
				this.crawlPages(config, engine);
			}
		}
		catch (CrawlerException e)
		{
			// Business-level abort: message only, no stack trace (intentional).
			logger.warn(e.getMessage());
		}
		catch (Exception e)
		{
			logger.error("采集有错误：" + config.getName(), e);
		}
	}

	/**
	 * Resolves the crawl implementation: taken from the Spring context when
	 * the class is annotated {@code @Component}, otherwise instantiated via
	 * {@link BeanUtil#initBean}.
	 */
	private Object resolveCrawl(CrawlerConfig config)
	{
		Class<?> crawlClazz = config.getCrawler();
		if (crawlClazz.isAnnotationPresent(Component.class))
		{
			return applicationContext.getBean(crawlClazz);
		}
		return BeanUtil.initBean(crawlClazz, applicationContext);
	}

	/**
	 * Picks the engine that supports the crawl implementation.
	 *
	 * @return the matching engine, or {@code null} when none supports it
	 */
	private Engine createEngine(CrawlerInvoker invoker, Object crawl)
	{
		if (crawl instanceof ListDetailCrawl)
		{
			return new ListDetailEngine<>(invoker, (ListDetailCrawl<?>)crawl, download);
		}
		return null;
	}

	/**
	 * Crawls every page of a "{page}" templated URL, either forward from the
	 * start page or (reverse mode) backward from the end page.
	 */
	private void crawlPages(CrawlerConfig config, Engine engine) throws Exception
	{
		// Guard: a zero or negative configured step would loop forever.
		int step = Math.max(1, config.getStep());
		if (!config.isReverse())
		{
			for (int i = config.getStartPage(); i <= config.getEndPage(); i += step)
			{
				this.startCrawler(config.getUrl().replace(PAGE_PLACEHOLDER, String.valueOf(i)), config, engine, i);
			}
		}
		else
		{
			for (int i = config.getEndPage(); i >= config.getStartPage(); i -= step)
			{
				this.startCrawler(config.getUrl().replace(PAGE_PLACEHOLDER, String.valueOf(i)), config, engine, i);
			}
		}
	}

	/**
	 * Fetches one concrete URL through the engine and hands the HTML over for
	 * handling.
	 */
	private void startCrawler(String url, CrawlerConfig config, Engine engine, int page) throws Exception
	{
		logger.info("解析：{}", url);
		String html = engine.invoke(invoker, url, config);
		engine.handle(html, config, page);
	}

	/** Stops accepting new tasks; already-submitted crawls run to completion. */
	public void stop()
	{
		executorService.shutdown();
	}

	public int getThread()
	{
		return thread;
	}

	public String getRoot()
	{
		return root;
	}

}
