/* MarkusCrawler
 *
 * Copyright (C) 2011 by Markus '[mackan]' Hyttinen <mackanhyttinen@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 */


package me.crawler;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.StringTokenizer;




public class CrawlerThread extends Thread {

	// Identifier assigned by the controller; reported back on completion.
	protected int id;
	// Shared work queue of pages still to be crawled.
	protected CrawlerQueue uQueue;
	// Controller coordinating thread lifecycle and collecting crawled pages.
	protected CrawlerController controller;
	// Filter deciding which discovered URLs are worth queueing.
	protected CrawlerFilter filter;


	/**
	 * @return the filter
	 */
	public synchronized CrawlerFilter getFilter() {
		return filter;
	}


	/**
	 * @param filter the filter to set
	 */
	public synchronized void setFilter(CrawlerFilter filter) {
		this.filter = filter;
	}


	/**
	 * @param _id identifier assigned by the controller
	 */
	public void setId(int _id) {
		id = _id;
	}


	/**
	 * @param _queue shared queue of pages to crawl
	 */
	public void setQueue(CrawlerQueue _queue) {
		uQueue = _queue;
	}

	/**
	 * @param _controller controller that owns this thread
	 */
	public void setCrawlerController(CrawlerController _controller) {
		controller = _controller;
	}

	public CrawlerThread() {
	}

	/**
	 * Pops pages off the shared queue and processes them until the queue is
	 * empty, asking the controller to start additional workers whenever
	 * capacity is available. Notifies the controller once this thread is done.
	 */
	@Override
	public void run() {
		// as long as the queue has URLs to process then keep running
		while (!uQueue.isEmpty()) {
			process(uQueue.pop());
			// if more threads can run then start more
			if (controller.getMaxThreads() > controller.getThreadsRunning()) {
				try {
					controller.startThreads();
				} catch (Exception e) {
					System.err.println("THREAD : " + id + "  " + e.toString());
				}
			}
		}
		// let the controller know that this thread has finished
		controller.threadFinished(id);
	}

	/**
	 * Downloads the page behind the queued object, extracts its anchor links,
	 * queues every link the filter accepts, and hands the crawled page to the
	 * controller. If the download fails, the page is dropped and one link of
	 * budget is credited back to the queue.
	 *
	 * @param o a {@code CrawlerHTMLPage} taken from the queue
	 */
	public synchronized void process(Object o) {

		CrawlerHTMLPage page = (CrawlerHTMLPage) o;
		URL url = page.getUrl();

		try {
			URLConnection urlConnection = url.openConnection();
			urlConnection.setAllowUserInteraction(false);

			// Read the whole response into a byte buffer first, then decode
			// once: decoding each 1024-byte chunk separately can split
			// multi-byte characters at chunk boundaries, and repeated String
			// concatenation is O(n^2).
			// BUGFIX: the original opened a SECOND connection via
			// url.openStream() (discarding the configured urlConnection),
			// leaked the stream on mid-read exceptions, and crashed with
			// StringIndexOutOfBoundsException when the very first read
			// returned -1 (empty response).
			ByteArrayOutputStream body = new ByteArrayOutputStream();
			byte[] buffer = new byte[1024];
			try (InputStream urlStream = urlConnection.getInputStream()) {
				int numRead;
				while ((numRead = urlStream.read(buffer)) != -1) {
					body.write(buffer, 0, numRead);
				}
			}
			// NOTE(review): platform default charset, matching the original's
			// new String(byte[]) usage — the crawler ignores the page's
			// declared encoding.
			String content = body.toString();

			for (String strLink : getLinks(content)) {
				try {
					URL urlLink = null;

					if (strLink.contains("http") || strLink.contains("www")) {
						// absolute link
						urlLink = new URL(strLink);
					} else if (filter.validUrl(url.getProtocol() + "://" + url.getHost() + strLink)) {
						// relative link: resolve against the current host
						urlLink = new URL(url.getProtocol(), url.getHost(), strLink);
					}

					// BUGFIX: explicit null check instead of relying on an
					// NPE (swallowed by the catch below) when the filter
					// rejected the relative link above.
					if (urlLink != null && filter.validUrl(urlLink.toString())) {
						CrawlerHTMLPage h = new CrawlerHTMLPage(urlLink);
						h.setParentId(page.getId());
						h.setDepth(page.getDepth() + 1);
						h.setId(controller.getUniqueNumber());
						uQueue.push(h);
					}
				} catch (Exception ignored) {
					// Malformed or rejected link: skip it and keep scanning
					// the remaining links on this page.
				}
			}

			page.setContent(content.getBytes());
			page.setDateCrawled(new Timestamp(System.currentTimeMillis()));
			controller.AddProccesedItem(page);

		} catch (Exception e) {
			// Download failed: credit one link of budget back to the queue so
			// the crawl can still reach its configured size.
			uQueue.incrMaxLinks();
		}
	}

	/**
	 * Extracts the href targets of all anchor ({@code <a }) tags from the
	 * downloaded HTML.
	 *
	 * @param content raw page markup
	 * @return the href values in document order (case preserved)
	 */
	private ArrayList<String> getLinks(String content) {
		// Lowercased, whitespace-normalized copy used only for searching;
		// replaceAll("\\s", " ") keeps the length identical, so offsets found
		// here line up with the original content (preserving link case).
		String lContent = content.toLowerCase().replaceAll("\\s", " ");
		ArrayList<String> links = new ArrayList<String>();

		int index = 0;
		while ((index = lContent.indexOf("<a ", index)) != -1) {
			if ((index = lContent.indexOf("href", index)) == -1) {
				break;
			}
			if ((index = lContent.indexOf("=", index)) == -1) {
				break;
			}

			String remaining = content.substring(++index);
			// The link value ends at the first quote, whitespace control
			// character, '>' or fragment marker.
			StringTokenizer st = new StringTokenizer(remaining, "\t\n\r\"'>#");
			// BUGFIX: guard against a trailing "href=" with nothing after it,
			// which made nextToken() throw NoSuchElementException and abort
			// link extraction for the whole page.
			if (st.hasMoreTokens()) {
				links.add(st.nextToken());
			}
		}
		return links;
	}


}
