package com.sopaths.crawler;

import javax.inject.Inject;

import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import com.sopaths.crawler.tasks.CrawlerTask;

/**
 * Base implementation of {@link Crawler} that performs the shared HTTP
 * fetch/parse cycle: assemble a URL for the task, execute an HTTP GET,
 * decode the response body, parse it with Jsoup, and hand the resulting
 * {@link Document} to the subclass to populate the task result.
 *
 * <p>Subclasses supply the source-specific pieces via
 * {@link #assembleURL(CrawlerTask)} and
 * {@link #assembleTaskResult(CrawlerTask, Document)}.
 */
public abstract class AbstractCrawler implements Crawler {

	// One logger per class, not per instance (Effective Java: prefer static final).
	private static final Logger logger = Logger.getLogger(AbstractCrawler.class);

	@Inject
	private CloseableHttpClient httpClient;

	// Charset used to decode the HTTP response body; subclasses/config may override.
	private String responseEncoding = "UTF-8";

	/**
	 * Crawls the given task: builds the URL, fetches the page, parses it and
	 * lets the subclass assemble the result, then marks the task finished.
	 *
	 * <p>This method is the error boundary of the crawl cycle: any exception
	 * (network, parsing, subclass failure) is logged and swallowed so a single
	 * failing task does not take down the caller. On failure the task is NOT
	 * finished.
	 *
	 * @param task the crawler task describing what to fetch and where to
	 *             store the result; must not be {@code null}
	 */
	@Override
	public void crawl(CrawlerTask<?> task) {
		try {
			logger.info("Start crawling. Type: " + task.getType() + ". Source: " + task.getSource() + ".");
			String url = assembleURL(task);
			HttpGet httpGet = new HttpGet(url);
			// Guard: avoid building the message when DEBUG is off.
			if (logger.isDebugEnabled()) {
				logger.debug("Sending HttpGet request. URL: " + url);
			}
			// try-with-resources ensures the response (and its connection) is released.
			try (CloseableHttpResponse response = getHttpClient().execute(httpGet)) {
				logger.info("Received HttpResponse. Status: " + response.getStatusLine());
				HttpEntity entity = response.getEntity();
				String html = EntityUtils.toString(entity, responseEncoding);
				// Guard: the full page HTML can be large; only concatenate when TRACE is on.
				if (logger.isTraceEnabled()) {
					logger.trace("Response html: " + html);
				}
				Document document = Jsoup.parse(html);
				assembleTaskResult(task, document);
				task.finish();
			}
		} catch (Exception ex) {
			// Deliberate boundary: log with full stack trace, never rethrow.
			logger.error("An error occurred while crawling.", ex);
		}
	}

	/**
	 * Builds the URL to fetch for the given task.
	 *
	 * @param task the task to build a URL for
	 * @return the absolute URL to request
	 */
	protected abstract String assembleURL(CrawlerTask<?> task);

	/**
	 * Extracts data from the parsed page and stores it on the task.
	 *
	 * @param task     the task whose result should be populated
	 * @param document the parsed HTML of the fetched page
	 */
	protected abstract void assembleTaskResult(CrawlerTask<?> task, Document document);

	/**
	 * Overrides the injected HTTP client (useful for tests).
	 *
	 * @param httpClient the client to use for subsequent requests
	 */
	public void setHttpClient(CloseableHttpClient httpClient) {
		this.httpClient = httpClient;
	}

	/**
	 * @return the HTTP client used to execute requests
	 */
	protected CloseableHttpClient getHttpClient() {
		return httpClient;
	}

	/**
	 * @return the charset name used to decode response bodies
	 */
	public String getResponseEncoding() {
		return responseEncoding;
	}

	/**
	 * Sets the charset used to decode response bodies (default {@code UTF-8}).
	 *
	 * @param responseEncoding a valid charset name, e.g. {@code "ISO-8859-1"}
	 */
	public void setResponseEncoding(String responseEncoding) {
		this.responseEncoding = responseEncoding;
	}

}
