package com.stray.crawl.worker;

import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.pool.PoolStats;

import com.stray.crawl.Constants.DefaultSetting;
import com.stray.crawl.Constants.PlanCategory;
import com.stray.crawl.BeanContext;
import com.stray.crawl.ResultContext;
import com.stray.crawl.TaskContext;
import com.stray.crawl.excp.WorkerException;
import com.stray.crawl.model.CrawlPlan;
import com.stray.crawl.model.PlanField;
import com.stray.crawl.model.dao.PlanRepository;
import com.stray.crawl.supt.EventListener;
import com.stray.crawl.supt.EventPublisher;
import com.stray.crawl.supt.LRUCache;
import com.stray.crawl.worker.task.BinaryCrawlTask;
import com.stray.crawl.worker.task.CrawlTask;
import com.stray.crawl.worker.task.DebugCrawlTask;
import com.stray.crawl.worker.task.DocumentCrawlTask;

/**
 * Coordinates crawl task scheduling for the worker: accepts submitted
 * {@link CrawlTask}s, throttles execution against a global concurrency cap
 * and a per-host route cap, hands runnable tasks to the shared
 * {@link ExecutorService}, and publishes task status events to registered
 * listeners.
 *
 * <p>Thread-safety: the pending/running queues are synchronized lists and the
 * promotion pass ({@link #organize}) is serialized on {@code this}; listeners
 * are notified from a snapshot copy so registration may happen concurrently.
 */
public class TaskService implements EventPublisher<CrawlTask, TaskStatus> {

	private final Log log = LogFactory.getLog(getClass());

	// Tunable limits, read once from configuration at construction time.
	private final int liveTime = BeanContext.getIntConfig("connection.live_time", DefaultSetting.CONNECTION_LIVE_TIME);
	private final int maxPerRoute = BeanContext.getIntConfig("connection.max_per_route", DefaultSetting.MAX_PER_ROUTE);
	private final int maxConcurrent = BeanContext.getIntConfig("connection.max_concurrent", DefaultSetting.MAX_CONCURRENT);
	private final int maxPending = BeanContext.getIntConfig("connection.max_pending", DefaultSetting.MAX_PENDING);

	// data indexing & storage
	@SuppressWarnings("unchecked")
	private final LRUCache<CrawlTask, Future<ResultContext>> cache = BeanContext.get(LRUCache.class);
	private final PlanRepository planRepo = BeanContext.get(PlanRepository.class);

	// executor
	private final ClientConnectionManager connMgr = BeanContext.get(ClientConnectionManager.class);
	private final ExecutorService executor = BeanContext.get(ExecutorService.class);

	// task management lists; iteration must hold the list's own monitor
	// (see Collections.synchronizedList contract)
	private final List<CrawlTask> pendingTasks;
	private final List<CrawlTask> runningTasks;

	// event listeners; may be (de)registered from any thread
	private final List<EventListener<CrawlTask, TaskStatus>> listeners;

	public TaskService() {
		this.pendingTasks = Collections.synchronizedList(new ArrayList<CrawlTask>());
		this.runningTasks = Collections.synchronizedList(new ArrayList<CrawlTask>(DefaultSetting.MAX_CONCURRENT));

		// Synchronized wrapper so add/removeListener are safe from any thread;
		// onEventRaise iterates over a snapshot copy, never the live list.
		this.listeners = Collections.synchronizedList(new ArrayList<EventListener<CrawlTask, TaskStatus>>());
	}

	/**
	 * Evicts expired connections and closes connections that have been idle
	 * longer than {@code liveTime} minutes. Intended to be invoked
	 * periodically by a housekeeping job.
	 */
	public void releaseConnection() {
		connMgr.closeExpiredConnections();
		connMgr.closeIdleConnections(liveTime, TimeUnit.MINUTES);
	}

	/**
	 * Queues a task for execution and immediately attempts a scheduling pass.
	 *
	 * @param task the task to enqueue; {@code null} is silently ignored
	 *             (newTask returns {@code null} when no crawl plan matches)
	 * @throws WorkerException if the pending queue is already full
	 */
	public void submit(CrawlTask task) {
		// Null check first: a null task must never trip the capacity guard.
		if (task == null) {
			return ;
		}
		if (pendingTasks.size() >= maxPending) {
			throw new WorkerException("Too many pending jobs. Max: " + maxPending);
		}

		pendingTasks.add(task);
		organize(task);
	}

	/**
	 * Convenience overload: submits a URL with empty configs and params.
	 *
	 * @return the created task, or {@code null} if no crawl plan matched
	 */
	public CrawlTask submit(String accessUrl) {
		return submit(accessUrl, new HashMap<String, String>(), new HashMap<String, String>());
	}

	/**
	 * Convenience overload: submits a URL with the given configs and empty params.
	 *
	 * @return the created task, or {@code null} if no crawl plan matched
	 */
	public CrawlTask submit(String accessUrl, Map<String, String> configs) {
		return submit(accessUrl, configs, new HashMap<String, String>());
	}

	/**
	 * Builds a task for the URL (via plan matching) and queues it.
	 *
	 * @return the created task, or {@code null} if no crawl plan matched
	 * @throws WorkerException if the pending queue is already full
	 */
	public CrawlTask submit(String accessUrl, Map<String, String> configs, Map<String, String> params) {
		CrawlTask task = newTask(accessUrl, configs, params);
		submit(task);

		return task;
	}

	/**
	 * Called when a task finishes: frees its running slot and triggers another
	 * scheduling pass so a pending task can be promoted.
	 */
	@Override
	public void onCallback(CrawlTask task) {
		runningTasks.remove(task);
		organize(task);
	}

	/**
	 * Fans a status event out to all registered listeners. Each listener is
	 * guarded individually so one misbehaving listener cannot block delivery
	 * to the remaining ones; a snapshot copy avoids
	 * ConcurrentModificationException from concurrent (de)registration.
	 */
	@Override
	public void onEventRaise(CrawlTask source, TaskStatus event) {
		List<EventListener<CrawlTask, TaskStatus>> snapshot =
				new ArrayList<EventListener<CrawlTask, TaskStatus>>(listeners);

		for (EventListener<CrawlTask, TaskStatus> listener : snapshot) {
			try {
				listener.onEvent(source, event);
			} catch (Exception ex) {
				log.warn("Error occur when propagating event message to listeners. ", ex);
			}
		}
	}

	/**
	 * Returns the crawl result for a task if it has completed.
	 *
	 * @return the result, or {@code null} when the task is unknown or still running
	 * @throws InterruptedException if interrupted while fetching the result
	 * @throws ExecutionException   if the task itself threw
	 */
	public ResultContext obtainResult(CrawlTask task) throws InterruptedException, ExecutionException {
		Future<ResultContext> future = cache.get(task);

		if (future != null && future.isDone()) {
			return future.get();
		} else {
			return null;
		}
	}

	/**
	 * Creates the concrete {@link CrawlTask} subtype for a URL based on the
	 * matched plan's category (binary / document / debug / generic).
	 *
	 * @return the new task, or {@code null} when no plan matches the URL
	 */
	public CrawlTask newTask(String accessUrl, Map<String, String> configs, Map<String, String> params) {
		CrawlTask task = null;

		CrawlPlan plan = planRepo.findMatchCrawlPlan(accessUrl);

		if (plan != null) {
			String category = plan.getPlanCategory();
			TaskContext taskCtx = newTaskContext(accessUrl, plan, configs, params);

			if (PlanCategory.BINARY.equals(category)) {
				task = new BinaryCrawlTask(this, taskCtx);
			} else if (PlanCategory.DOCUMENT.equals(category)) {
				task = new DocumentCrawlTask(this, taskCtx);
			} else if (PlanCategory.DEBUG.equals(category)) {
				task = new DebugCrawlTask(this, taskCtx);
			} else {
				// Unknown category falls back to the generic task.
				task = new CrawlTask(this, taskCtx);
			}
		}

		if (task == null) {
			log.warn("No matching crawl plan found for URL - " + accessUrl);
		}

		return task;
	}

	/** Registers a status listener; {@code null} is ignored. */
	public void addListener(EventListener<CrawlTask, TaskStatus> listener) {
		if (listener != null) {
			listeners.add(listener);
		}
	}

	/**
	 * Unregisters a status listener. {@code List.remove} is already a no-op
	 * for unknown listeners, so no (racy) contains() pre-check is needed.
	 */
	public void removeListener(EventListener<CrawlTask, TaskStatus> listener) {
		listeners.remove(listener);
	}

	// Private support method

	/**
	 * Scheduling pass: promotes pending tasks to the executor while honouring
	 * the global concurrency cap ({@code maxConcurrent}) and the per-host
	 * route cap ({@code maxPerRoute}). Synchronized so only one thread
	 * reshuffles the queues at a time.
	 *
	 * @param t the task that triggered this pass (not used directly; the pass
	 *          always considers the whole pending queue)
	 */
	private synchronized void organize(CrawlTask t) {
		int runningSize = runningTasks.size();

		if (runningSize >= maxConcurrent || pendingTasks.isEmpty()) {
			return ;
		}

		List<CrawlTask> nextTasks = new ArrayList<CrawlTask>();
		Map<String, Integer> runningCount = new HashMap<String, Integer>();

		// Count running tasks per host. Iterating a synchronizedList requires
		// holding the list's monitor, otherwise a concurrent add/remove can
		// throw ConcurrentModificationException.
		synchronized (runningTasks) {
			for (CrawlTask task : runningTasks) {
				String host = task.getTaskContext().getUrl().getHost();

				Integer count = runningCount.get(host);
				runningCount.put(host, count != null ? count + 1 : 1);
			}
		}

		// Select pending tasks whose host is still below the per-route cap.
		synchronized (pendingTasks) {
			for (CrawlTask task : pendingTasks) {
				String host = task.getTaskContext().getUrl().getHost();
				Integer count = runningCount.get(host);

				if (count == null || count < maxPerRoute) {
					nextTasks.add(task);
					runningSize++;

					runningCount.put(host, count != null ? count + 1 : 1);

					if (runningSize >= maxConcurrent) {
						break ;
					}
				}
			}
		}

		// Hand the selected tasks to the executor; a failed submission leaves
		// the task in the pending queue for a later pass.
		for (CrawlTask task : nextTasks) {
			try {
				Future<ResultContext> result = executor.submit(task);

				pendingTasks.remove(task);
				runningTasks.add(task);

				cache.put(task, result);
			} catch (Exception ex) {
				log.warn("Execute task fail. task: " + task, ex);
			}
		}
	}

	/**
	 * Copies plan settings plus caller-supplied configs/params into a fresh
	 * {@link TaskContext} for the given URL.
	 *
	 * @throws WorkerException wrapping any failure (e.g. a malformed URL)
	 */
	private TaskContext newTaskContext(String url, CrawlPlan plan, Map<String, String> configs, Map<String, String> params) {
		try {
			TaskContext taskCtx = new TaskContext();

			taskCtx.setUrl(new URL(url));
			taskCtx.setPlanName(plan.getName());

			taskCtx.setMethod(plan.getRequestMethod());
			taskCtx.setAgent(plan.getRequestAgent());
			taskCtx.setTimeout(plan.getConnectionTimeout());

			taskCtx.setUseReferer(plan.isEnableReferer());
			taskCtx.setUseCache(plan.isEnableCache());

			taskCtx.setCharset(plan.getDocumentCharset());
			taskCtx.setExpress(plan.getExpressQuery());
			taskCtx.setAction(plan.getActionQuery());

			// Flatten the plan's field definitions into parallel name/query arrays.
			List<PlanField> fieldList = plan.getFields();
			if (fieldList != null) {
				int count = fieldList.size();

				String[] fields = new String[count];
				String[] queries = new String[count];

				for (int i = 0; i < count; i++) {
					PlanField field = fieldList.get(i);

					fields[i] = field.getName();
					queries[i] = field.getQuery();
				}

				taskCtx.setFields(fields);
				taskCtx.setQueries(queries);
			}

			taskCtx.setConfigs(configs);
			taskCtx.setParams(params);

			return taskCtx;
		} catch (Exception ex) {
			throw new WorkerException("Fail to new task context instance. ", ex);
		}
	}

	/**
	 * Logs a snapshot of thread-pool, connection-pool, and queue statistics.
	 * Assumes the injected manager/executor are the pooling implementations
	 * wired up by the application context (casts will fail otherwise).
	 */
	public synchronized void printStatus() {
		PoolingClientConnectionManager mgr = (PoolingClientConnectionManager) connMgr;

		ThreadPoolExecutor exec = (ThreadPoolExecutor) this.executor;
		PoolStats stats = mgr.getTotalStats();

		log.info(String.format("> Thread (Normal/Core/Active/Max/Largest) : %d / %d / %d / %d / %d",
				 exec.getPoolSize(), exec.getCorePoolSize(), exec.getActiveCount(), exec.getMaximumPoolSize(), exec.getLargestPoolSize()));
		log.info(String.format("> Task (Normal/Completed) : %d / %d",
				 exec.getTaskCount(), exec.getCompletedTaskCount()));
		log.info(String.format("> Connection (Max/Available/Leased/Pending) : %d / %d / %d / %d",
				 stats.getMax(), stats.getAvailable(), stats.getLeased(), stats.getPending()));

		log.info("> Pending Count  : " + pendingTasks.size());
		log.info("> Running Count  : " + runningTasks.size());
		log.info("> Executed Count : " + cache.size() + " (" + cache.keySet() + ")");
	}

}