package com.sopaths.crawler;

import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;

import com.sopaths.crawler.tasks.CityCrawlerTask;
import com.sopaths.crawler.tasks.CrawlerTask;


/**
 * Base crawler for city-listing pages. Holds the target {@code url} and
 * narrows the generic {@link CrawlerTask} callback to a
 * {@link CityCrawlerTask}-typed hook for subclasses.
 *
 * <p>Subclasses configure the page to fetch via {@link #setUrl(String)} and
 * override {@link #assembleCityCrawlerTaskResult(CityCrawlerTask, Document)}
 * to extract city data from the parsed document.
 */
public abstract class CityCrawler extends AbstractCrawler {
	// One shared logger for all instances; the logger itself is thread-safe.
	private static final Logger LOGGER = Logger.getLogger(CityCrawler.class);

	// Absolute URL of the city page to crawl; set by the caller before crawling.
	private String url;

	/**
	 * Returns the configured URL unchanged; this crawler targets a single
	 * fixed page rather than building URLs per task.
	 *
	 * @param task the current crawl task (unused here)
	 * @return the URL set via {@link #setUrl(String)} — may be {@code null}
	 *         if never configured
	 */
	@Override
	protected String assembleURL(CrawlerTask<?> task) {
		LOGGER.debug("Assembled URL: " + url);
		return url;
	}

	/**
	 * Narrows the task to {@link CityCrawlerTask} and delegates to the
	 * subclass hook.
	 *
	 * @param task     must be a {@link CityCrawlerTask}; any other subtype
	 *                 results in a {@link ClassCastException}
	 * @param document the parsed HTML of the fetched page
	 */
	@Override
	protected void assembleTaskResult(CrawlerTask<?> task, Document document) {
		CityCrawlerTask innerTask = (CityCrawlerTask) task;
		assembleCityCrawlerTaskResult(innerTask, document);
	}

	/**
	 * Extraction hook for subclasses. The default implementation does
	 * nothing, so overriding is optional but required for any useful result.
	 *
	 * @param task     the typed city-crawl task to populate
	 * @param document the parsed HTML of the fetched page
	 */
	protected void assembleCityCrawlerTaskResult(CityCrawlerTask task, Document document) {
		// Intentionally empty: subclasses override to extract city data.
	}

	/** @return the configured crawl URL, or {@code null} if not set */
	public String getUrl() {
		return url;
	}

	/** @param url the absolute URL of the city page to crawl */
	public void setUrl(String url) {
		this.url = url;
	}
}
