package com.wsc.crawler.grabber;


import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.log4j.Logger;

import com.wsc.crawler.WSCrawler;
import com.wsc.crawler.DNS.AsyncDNSResolver;
import com.wsc.crawler.URLutils.RefineURL;
import com.wsc.crawler.URLutils.URLBean;
import com.wsc.crawler.init.CrawlerConfig;

/* 
 * A multithreaded Grabber performs multiple GET requests from multiple threads.
 * 
 * *** Before running the Grabber, set the frontier first.
 * 
 */
public class Grabber {

	/*
	 * Cross-thread stop flag: when set to true the crawler stops refilling
	 * its queue and the control loop exits.
	 * BUGFIX: declared volatile — it is written from other threads (e.g. by
	 * WSCrawler) and polled in startGrabber()'s control loop; without
	 * volatile that loop may never observe the write.
	 */
	public static volatile boolean isStopped = false;

	// Frontier queue holding every URL still to be grabbed.
	// This queue is set by WSCrawler (or refilled via reFillQueue()).
	private URLBeanQueue queue = new URLBeanQueue();

	// Queue of beans whose hosts resolved successfully (DNS-resolved URL
	// paired with its original URL). Acts as the local frontier that all
	// grabber threads consume from.
	private URLBeanQueue resolvedHosts = new URLBeanQueue();

	// Queue of beans whose DNS resolution failed.
	private URLBeanQueue unresolvedHosts = new URLBeanQueue();

	// Crawler configuration, supplied by WSCrawler through the constructor.
	private final CrawlerConfig config;

	// Class logger.
	private static final Logger log = Logger.getLogger(Grabber.class.getName());

	// Upper bound for the pooled-connection total, handed to
	// PoolingClientConnectionManager. Taken from configuration.
	private int max_threads;

	// Number of grabber threads to run concurrently. Taken from
	// configuration; may be lowered to the frontier size in startGrabber().
	private int num_threads;

	/**
	 * Creates a Grabber configured from the supplied CrawlerConfig.
	 *
	 * @param config crawler configuration (thread counts, user agent, ...)
	 */
	public Grabber(CrawlerConfig config) {
		this.config = config;

		// Pool ceiling and concurrency level both come from configuration.
		max_threads = config.getMax_crawler_threads();
		num_threads = config.getCrawler_Threads();
	}

	/**
	 * Sets the frontier queue. Must be called before startGrabber().
	 *
	 * @param queue the frontier of URLs to grab
	 */
	public void setFrontier(URLBeanQueue queue) {
		this.queue = queue;
	}

	/**
	 * Refills the frontier queue from the frontier server. Network or URL
	 * failures are logged and leave the current queue untouched.
	 */
	public synchronized void reFillQueue() {

		// LocalClient performs the network work: it sends a GET request to
		// the frontier server, parses the response and builds a URLBeanQueue.
		LocalClient client = new LocalClient(config);
		try {

			// Install the freshly fetched batch as the new frontier.
			this.setFrontier(client.getUrlsFromFroniter());

			log.info("Queue is Set in Grabber is "+queue.size());

		} catch (UnknownHostException e) {

			log.warn("Exception Occured while Refilling Queue.",e);

		} catch (MalformedURLException e) {

			log.warn("Exception Occured while Refilling Queue.",e);
		}
	}

	/**
	 * Resolves every URL in the frontier and replaces the host part of each
	 * URL with its resolved IP. Fills {@code resolvedHosts} with the
	 * successes and {@code unresolvedHosts} with the DNS failures.
	 */
	public void reFillResolvedHosts() {

		AsyncDNSResolver asynDNSresolver = new AsyncDNSResolver();

		// Encode/refine the query part of each URL before resolving.
		refineURLs();

		// Hand the full frontier to the resolver.
		asynDNSresolver.setFullqueue(queue);

		// Splits the supplied queue into n equal small queues and starts a
		// new resolver thread for each of them.
		asynDNSresolver.initializeThreads();

		// Collect the resolved beans ...
		resolvedHosts = asynDNSresolver.getResolvedURLBeans();

		// ... and the DNS-failure beans.
		unresolvedHosts = asynDNSresolver.getUnResolvedURLBeans();
	}

	/**
	 * Entry point, called by WSCrawler. Resolves DNS for the frontier,
	 * starts the grabber threads, then loops: replacing dead threads with
	 * new work and refilling the frontier until {@code isStopped} is set.
	 */
	public void startGrabber() {

		// The frontier must be set and non-empty before grabbing can start.
		if (queue != null && !queue.isEmpty()) {

			// A pooling connection manager is mandatory when a single
			// HttpClient instance is shared between several threads.
			PoolingClientConnectionManager cm = new PoolingClientConnectionManager();

			// Pool ceiling is configurable; default value in config file is 100.
			cm.setMaxTotal(max_threads);

			final HttpClient httpclient = new DefaultHttpClient(cm);

			// Timeouts are essential: without them a stalled connection
			// blocks its grabber thread indefinitely.
			HttpParams params = httpclient.getParams();
			HttpConnectionParams.setConnectionTimeout(params, 5000);
			HttpConnectionParams.setSoTimeout(params, 7000);

			int size = 0;

			try {

				// Resolve hosts in the queue and replace the host part of
				// each URL with its resolved IP; fills resolvedHosts.
				reFillResolvedHosts();

				log.info("Number of resolved hosts are :"
						+ (size = resolvedHosts.size()));
				log.info("Number of Unresolved hosts are :"
						+ unresolvedHosts.size());

				// Never start more threads than there are resolved URLs.
				if (size < num_threads) {
					num_threads = size;
				}

				// Container holding a reference to every grabber thread.
				final List<GetThread> threads = new ArrayList<GetThread>();

				for (int i = 0; i < num_threads; i++) {

					// Dequeue the next bean from the resolved frontier.
					URLBean refinedUrlBean = resolvedHosts.dequeue();

					// BUGFIX: null-check BEFORE dereferencing. The original
					// logged refinedUrlBean.toString() first, which throws
					// NullPointerException when the queue hands back null.
					if (refinedUrlBean != null) {

						log.debug("Crawling Bean  ::" + refinedUrlBean.toString());

						// Build the GET request carrying the mandatory
						// default HTTP headers.
						HttpGet httpget = this.constructDefaultHeader(refinedUrlBean);

						// One thread per request; collected for control below.
						threads.add(new GetThread(httpclient, httpget, refinedUrlBean, i + 1));
					}
				}

				log.debug("Number of Threads are : " + threads.size());

				// Start the first wave of threads ...
				for (int j = 0; j < threads.size(); j++) {
					threads.get(j).start();
				}

				// ... and wait for all of them to finish.
				for (int j = 0; j < threads.size(); j++) {
					threads.get(j).join();
				}

				// Thread-controlling loop: keeps feeding work to thread
				// slots until the crawler is stopped.
				while (true) {

					try {
						Thread.sleep(5000);
					} catch (InterruptedException e1) {
						// BUGFIX: restore the interrupt flag instead of
						// swallowing the interruption.
						Thread.currentThread().interrupt();
						log.warn("Grabber control loop interrupted.", e1);
					}

					if (queue != null && !queue.isEmpty()) {

						reFillResolvedHosts();

						if (Thread.activeCount() <= num_threads) {

							log.debug("Thread Count is less than numthreads In Thread :" + Thread.currentThread().getId());

							// Inspect every slot, restarting dead threads.
							for (int i = 0; i < threads.size();) {

								// Re-check: the queue may drain inside this loop.
								if (queue != null && !queue.isEmpty()) {

									GetThread thread = threads.get(i);
									log.debug("Checking thread at ("+i+") is"+thread.getId());

									if (!thread.isAlive()) {
										log.debug("Thread " + i
												+ " is dead, restarting it.");

										// Fetch the next unit of work.
										URLBean refinedUrlBean = resolvedHosts.dequeue();
										if (refinedUrlBean != null) {
											HttpGet httpget = null;
											try {
												httpget = constructDefaultHeader(refinedUrlBean);
											} catch (MalformedURLException e) {
												// BUGFIX: log via the class logger
												// instead of printStackTrace().
												log.warn("Malformed URL while restarting thread " + i, e);
											}

											GetThread tempthread = new GetThread(
													httpclient, httpget, refinedUrlBean, i);

											// Replace the dead thread in place.
											threads.remove(i);
											threads.add(i, tempthread);
											threads.get(i).start();
											log.debug("Thread " + i + " started.");
											log.debug("Thread list size is :"+threads.size());

											try {
												threads.get(i).join();
											} catch (InterruptedException e) {
												Thread.currentThread().interrupt();
												log.warn("Interrupted while joining thread " + i, e);
											}
										}
									}

									// BUGFIX: advance unconditionally. The original
									// incremented i only after restarting a dead
									// thread with a non-null bean, so a live
									// thread (or a null dequeue) left this loop
									// spinning forever on the same index.
									i++;

								} else {
									log.info("Queue is empty in for loop");
									if (!isStopped)
										reFillQueue();
									// break the for loop once stopped
									else break;
								}
							}
						} // if check thread count
						else {
							log.info("Current Thread count is max count=:"+Thread.activeCount());
						}
					} else {
						log.info("Queue is Empty...!");
						log.info("Refilling Queue...!");
						// Refill, unless the crawler has been stopped.
						log.debug("IsStopped ="+isStopped);
						if (!isStopped)
							reFillQueue();
						// break the while loop once stopped
						else break;
					}

				}// while

			} catch (Exception ex) {
				// BUGFIX: route through the logger, keeping the stack trace.
				log.error("Unexpected exception in Grabber.", ex);

			} finally {
				// When the HttpClient instance is no longer needed, shut
				// down the connection manager to ensure immediate
				// deallocation of all system resources.
				httpclient.getConnectionManager().shutdown();
			}
		}// if null check
		else {
			log.fatal("Grabber Recieved An Empty Queue");
			log.fatal("Grabber Exiting....");

			// safe exit
			WSCrawler.stopInstance();
		}
	}

	/**
	 * Refines/encodes the query part of every URL in the frontier queue.
	 * A no-op when the queue is empty; encoding failures are logged.
	 */
	private void refineURLs() {

		if (!queue.isEmpty()) {

			try {

				// Static helper encodes the query part of each URL.
				// null means: use the default encoding, i.e. UTF-8.
				queue = RefineURL.refinedURLs(queue, null);

			} catch (UnsupportedEncodingException e) {

				log.warn("Exception occured while refining URLs.",e);

			} catch (MalformedURLException e) {

				log.warn("Malformed URL Exception occured in refineURLs() method. ",e);

			}
		}

	}

	/**
	 * Constructs a default HTTP GET request for the bean's DNS-resolved URL,
	 * carrying the mandatory request headers.
	 *
	 * @param refinedUrlBean bean holding the resolved and original URLs
	 * @return the prepared HttpGet, or null when the bean carries no
	 *         resolved URL
	 * @throws MalformedURLException if the URL cannot be processed
	 */
	private HttpGet constructDefaultHeader(URLBean refinedUrlBean) throws MalformedURLException {

		// Request the DNS-resolved form of the URL.
		URL encodedURL = refinedUrlBean.getDnsresolvedHost();

		if (encodedURL != null) {

			log.debug("Constructing HTTPHeader for URL ("+encodedURL+")");

			HttpGet httpget = new HttpGet(encodedURL.toString());

			// Mandatory: since the request line targets an IP, the Host
			// header tells the web server which virtual host to dispatch to.
			httpget.addHeader("Host", refinedUrlBean.getOriginalURL().getHost());

			// User-Agent comes from configuration.
			httpget.addHeader("User-Agent", config.getUser_Agent());

			// Declare that we hold no cached copies.
			httpget.addHeader("Cache-Control", "no-cache");

			// Do-not-track: ask the server not to send cookies etc.
			httpget.addHeader("DNT", "1");

			// Restrict accepted response MIME types.
			httpget.addHeader("Accept",
					"text/*, text/html, text/html;level=1, */*");

			return httpget;
		}

		// No resolved URL available for this bean.
		return null;
	}

	/**
	 * A thread that performs a single GET request and hands the response to
	 * HandleHTTPResponse.
	 */
	static class GetThread extends Thread {

		private final HttpClient httpClient;
		private final HttpContext context;
		private final HttpGet httpget;
		private final int id;
		private final URLBean bean;

		public GetThread(HttpClient httpClient, HttpGet httpget, URLBean bean, int id) {
			this.httpClient = httpClient;
			// Each thread gets its own execution context.
			this.context = new BasicHttpContext();
			this.httpget = httpget;
			this.id = id;
			this.bean = bean;
		}

		/**
		 * Executes the GET and dispatches the response; on any failure the
		 * request is aborted and the error is logged.
		 */
		@Override
		public void run() {

			log.info(" - about to get something from "+bean.toString());

			try {

				// Execute the request on the shared (pooled) client.
				HttpResponse response = httpClient.execute(httpget, context);

				if (response != null) {
					// Hand off header filtering / body handling.
					new HandleHTTPResponse(response, bean);
				} else {
					log.info("Response is null from "
							+ httpget.getURI().toURL());
				}

			} catch (Exception e) {
				// Abort so the pooled connection is released immediately.
				httpget.abort();
				log.warn(id + " - error: " + e);
			}
		}

	}

}
