package com.blue.crawler.annotation;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.blue.crawler.CrawlerException;


/**
 * Parses crawler annotations ({@code @Crawler}, {@code @CrawlerPath},
 * {@code @CrawlerProxy}, {@code @Download}) on a class into a
 * {@link CrawlerConfig}. Results are cached per class, so repeated
 * {@link #parse(Class)} calls for the same class are cheap.
 *
 * <p>Thread-safety: the singleton instance is published via a
 * {@code volatile} field with double-checked locking, and the cache is a
 * {@link ConcurrentHashMap}, so {@code parse} may be called concurrently.
 */
public class Parser
{
	private static final Logger logger = LoggerFactory.getLogger(Parser.class);

	// volatile is required for correct double-checked locking: without it,
	// another thread may observe a reference to a partially constructed
	// Parser due to JMM instruction reordering.
	private static volatile Parser instance;

	// ConcurrentHashMap because the singleton is shared across threads;
	// computeIfAbsent on a plain HashMap is not safe under concurrent
	// writes (table corruption / infinite loops are possible).
	private final Map<Class<?>, CrawlerConfig> cache = new ConcurrentHashMap<>();

	private Parser()
	{
	}

	/**
	 * Returns the lazily created singleton instance (double-checked locking).
	 */
	public static Parser getInstance()
	{
		if (instance == null)
		{
			synchronized (Parser.class)
			{
				if (instance == null)
					instance = new Parser();
			}
		}
		return instance;
	}

	/**
	 * Returns a snapshot list of all configurations parsed so far.
	 * The returned list is a copy; mutating it does not affect the cache.
	 */
	public List<CrawlerConfig> listConfig()
	{
		return new ArrayList<>(cache.values());
	}

	/**
	 * Parses {@code clazz} into a {@link CrawlerConfig}, returning the
	 * cached result if the class was parsed before.
	 *
	 * @throws CrawlerException if {@code clazz} lacks the @Crawler annotation
	 */
	public CrawlerConfig parse(Class<?> clazz)
	{
		return cache.computeIfAbsent(clazz, this::parseCrawler);
	}

	/**
	 * Builds a fresh configuration from the annotations on {@code clazz}.
	 * Falls back to the fully qualified class name when no crawler name
	 * is given in the annotation.
	 */
	private CrawlerConfig parseCrawler(Class<?> clazz)
	{
		Crawler annoCrawler = clazz.getAnnotation(Crawler.class);
		if (annoCrawler == null)
			throw new CrawlerException(clazz.getName() + " 缺少 @Crawler 注解");

		logger.info("爬虫解析：{}", clazz.getName());

		CrawlerConfig crawlerConfig = new CrawlerConfig();
		crawlerConfig.setName(annoCrawler.name().isEmpty() ? clazz.getName() : annoCrawler.name());
		crawlerConfig.setClazz(clazz);
		crawlerConfig.setCrawler(annoCrawler.value());
		crawlerConfig.setInterval(annoCrawler.interval());

		this.parseCrawlerPath(clazz, crawlerConfig);
		this.parseCrawlerProxy(clazz, crawlerConfig);
		this.parseDownload(clazz, crawlerConfig);

		return crawlerConfig;
	}

	/**
	 * Copies the optional @CrawlerPath settings (URL, page range, step,
	 * ordering, headers) into the config. No-op when the annotation is absent.
	 */
	private void parseCrawlerPath(Class<?> clazz, CrawlerConfig crawlerConfig)
	{
		CrawlerPath annoPath = clazz.getAnnotation(CrawlerPath.class);
		if (annoPath == null)
			return;

		this.parseCrawlerHeader(annoPath, crawlerConfig);
		crawlerConfig.setUrl(annoPath.url());
		crawlerConfig.setStartPage(annoPath.startPage());
		crawlerConfig.setEndPage(annoPath.endPage());
		crawlerConfig.setStep(annoPath.step());
		crawlerConfig.setReverse(annoPath.reverse());
	}

	/**
	 * Converts the annotation's {@code "Name=Value"} header strings into
	 * {@link Header} objects. Splits on the FIRST '=' only, so header values
	 * may themselves contain '=' (e.g. {@code Cookie=a=b}).
	 *
	 * @throws IllegalArgumentException if an entry contains no '='
	 */
	private void parseCrawlerHeader(CrawlerPath annoPath, CrawlerConfig crawlerConfig)
	{
		String[] hh = annoPath.headers();
		if (hh == null || hh.length == 0)
		{
			crawlerConfig.setHeaders(new Header[0]);
			return;
		}

		Header[] headers = new Header[hh.length];
		for (int i = 0; i < hh.length; i++)
		{
			// limit 2: only the first '=' separates name from value
			String[] kh = hh[i].split("=", 2);
			if (kh.length != 2)
				throw new IllegalArgumentException("Header配置错误：" + hh[i]);

			headers[i] = new BasicHeader(kh[0].trim(), kh[1].trim());
		}
		logger.info("headers 个数：{}", headers.length);
		crawlerConfig.setHeaders(headers);
	}

	/**
	 * Copies the optional @CrawlerProxy settings into the config and marks
	 * proxying as enabled. No-op when the annotation is absent.
	 */
	private void parseCrawlerProxy(Class<?> clazz, CrawlerConfig crawlerConfig)
	{
		CrawlerProxy annoProxy = clazz.getAnnotation(CrawlerProxy.class);
		if (annoProxy == null)
			return;

		crawlerConfig.setProxy(true);
		crawlerConfig.setProxyHost(annoProxy.host());
		crawlerConfig.setProxyPort(annoProxy.port());
		crawlerConfig.setProxyType(annoProxy.type());
	}

	/**
	 * Copies the optional @Download settings (root directory, thumbnail
	 * dimensions) into the config and marks downloading as enabled.
	 * No-op when the annotation is absent.
	 */
	private void parseDownload(Class<?> clazz, CrawlerConfig crawlerConfig)
	{
		Download annoDownload = clazz.getAnnotation(Download.class);
		if (annoDownload == null)
			return;

		crawlerConfig.setDownload(true);
		crawlerConfig.setRoot(annoDownload.root());
		crawlerConfig.setThumbHeight(annoDownload.thumbHeight());
		crawlerConfig.setThumbWidth(annoDownload.thumbWidth());
	}

}
