package com.sopaths.crawler.$51job;

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.inject.Named;

import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.sopaths.contract.crawler.City;
import com.sopaths.contract.crawler.CrawlerTaskSource;
import com.sopaths.contract.crawler.CrawlerTaskType;
import com.sopaths.crawler.CityCrawler;
import com.sopaths.crawler.tasks.CityCrawlerTask;
import com.sopaths.crawler.tasks.CrawlerTask;

@Named("51JobCityCrawler")
public class $51JobCityCrawler extends CityCrawler {

	private static final Logger logger = Logger.getLogger($51JobCityCrawler.class);

	/**
	 * Extracts code/name pairs from lines of the 51job area script, which have the
	 * shape {@code ja['CODE']='NAME'}. Compiled once instead of per crawl.
	 */
	private static final Pattern CITY_ENTRY_PATTERN = Pattern.compile("ja\\[\\'(.*)\\'\\]=\\'(.*)\\'");

	/**
	 * Province-level area codes to exclude from the city result. Commented-out
	 * entries are deliberately kept as cities (e.g. municipalities); regenerate
	 * this list with {@link #main(String[])}, which prints all XX0000 codes.
	 */
	private static final Set<String> provinces = new HashSet<>(Arrays.asList(/*"010000","020000",*/"030000",/*"040000",*//*"050000",*//*"060000",*/"070000","080000","090000","100000","110000","120000","130000","140000","150000","160000","170000","180000","190000","200000","210000","220000","230000","240000","250000","260000","270000","280000","290000","300000","310000","320000"/*,"330000","340000","350000","360000"*/));

	/**
	 * Entry page whose markup contains the script tag pointing at the
	 * city-list javascript. The task argument is not needed to build it.
	 */
	@Override
	protected String assembleURL(CrawlerTask<?> task) {
		return "http://search.51job.com/jobsearch/";
	}

	/**
	 * Locates the jobarea script referenced by {@code document}, downloads it,
	 * parses every {@code ja['CODE']='NAME'} entry and stores the non-province
	 * cities on {@code task}. Any failure is logged and swallowed so a broken
	 * crawl does not propagate to the caller.
	 *
	 * @param task     receives the parsed {@link City} list via {@code setResult}
	 * @param document the already-fetched search page to scan for the script tag
	 */
	@Override
	protected void assembleCityCrawlerTaskResult(CityCrawlerTask task, Document document) {
		try {
			logger.info("Assemble city crawler task result.");
			Elements cityScriptElements = document.select("script[src^=http://js.51jobcdn.com/in/js/2009/jobarea_array_c.js]");
			String jsUrl = null;
			for (Element element : cityScriptElements) {
				jsUrl = element.attr("src");
			}
			// Guard: without the script URL, new HttpGet(null) would NPE.
			// Fail soft with a log entry, consistent with the catch below.
			if (jsUrl == null) {
				logger.error("City javascript element not found in document; skipping city crawl.");
				return;
			}
			HttpGet httpGet = new HttpGet(jsUrl);
			logger.debug("Sending HttpGet request. URL: " + jsUrl);
			try (CloseableHttpResponse response = getHttpClient().execute(httpGet)) {
				logger.info("Received HttpResponse. Status: " + response.getStatusLine());
				HttpEntity entity = response.getEntity();
				List<City> cities = new LinkedList<City>();
				// The script is served encoded as gb2312.
				try (Scanner scanner = new Scanner(entity.getContent(), "gb2312")) {
					// hasNextLine() (not hasNext()) matches the nextLine() reads below;
					// hasNext() is token-based and misjudges whitespace-only trailers.
					while (scanner.hasNextLine()) {
						String line = scanner.nextLine();
						logger.trace("City javascript line: " + line);
						Matcher matcher = CITY_ENTRY_PATTERN.matcher(line);
						if (matcher.find()) {
							City city = new City();
							city.setName(matcher.group(2));
							city.setCode(matcher.group(1));
							// Province-level codes are aggregates, not cities.
							if (!provinces.contains(city.getCode())) {
								cities.add(city);
							}
							logger.trace("City: name-" + city.getName() + ";code-" + city.getCode());
						}
					}
				}
				task.setResult(cities);
			}
		} catch (Exception ex) {
			// Single boundary catch; the previous inner log-and-rethrow caused
			// the same exception to be logged twice.
			logger.error("An error occurred while assembling city crawler result.", ex);
		}
	}

	/**
	 * Maintenance helper: runs a full city crawl and prints every
	 * province-level code ({@code XX0000}) as a quoted, comma-separated list,
	 * ready to paste into the {@code provinces} set above.
	 */
	public static void main(String[] args) throws Exception {
		// try-with-resources so the connection pool is released on exit.
		try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
			$51JobCityCrawler crawler = new $51JobCityCrawler();
			crawler.setHttpClient(httpClient);
			CityCrawlerTask task = new CityCrawlerTask();
			task.setType(CrawlerTaskType.CityCrawlerTask);
			task.setSource(CrawlerTaskSource.$51Job);
			crawler.crawl(task);
			task.finish();
			for (City city : task.getResult()) {
				if (city.getCode().endsWith("0000")) {
					System.out.print("\"" + city.getCode() + "\",");
				}
			}
		}
	}
}
