package cn.edu.tsinghua.xkk.spider;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.ConnectException;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;

import org.htmlparser.Node;
import org.htmlparser.Parser;
import org.htmlparser.Remark;
import org.htmlparser.Tag;
import org.htmlparser.Text;
import org.htmlparser.nodes.TextNode;
import org.htmlparser.tags.*;
import org.htmlparser.util.EncodingChangeException;
import org.htmlparser.util.NodeIterator;
import org.htmlparser.util.NodeList;
import org.htmlparser.util.ParserException;

import cn.edu.tsinghua.xkk.entity.*;

/**
 * A simple breadth-first web spider: starting from a set of seed URLs it
 * downloads pages, follows their links, honours robots.txt exclusions, and
 * writes each fetched page to disk under a configurable parent path.
 * @author shibei
 *
 */
public class XKKSpider implements Runnable{
	
	private static final String CHARSET_STRING = "charset=";
	private String[] startURLs;//the seeds of the spider
	private Queue<String> queueURLs;//the BFS frontier
	private Map<String, Integer> isSearchedURLs;//URL -> page ID for every URL ever enqueued
	private Map<String, List<String>> robotTxtCache;//host -> disallowed URL prefixes
	private Parser p;//parser for the page currently being processed
	private static int maxMemorySize = 1000000000;//crawl budget, in characters downloaded
	private static int currentMemorySize = 0;//characters downloaded so far
	private static String parentPath = "";//the parent path of the output files
	// NOTE(review): shared mutable counter; ID++ below is not atomic, so two
	// spider threads could assign duplicate IDs. Kept as-is for compatibility.
	public static Integer ID = 0;
	
	/**
	 * Create a spider seeded with the given start URLs.
	 * @param startURLs the seed URLs; enqueued and registered when run() starts
	 */
	public XKKSpider(String[] startURLs)
	{
		this.startURLs = startURLs;
		queueURLs = new LinkedList<String>();
		isSearchedURLs = new Hashtable<String, Integer>();
		robotTxtCache = new Hashtable<String, List<String>>();
	}
	
	/**
	 * @return the parent directory that fetched pages are written under
	 */
	public static String getParentPath()
	{
		return parentPath;
	}
	
	/**
	 * Set the parent directory that fetched pages are written under.
	 * @param p the new parent path
	 */
	public static void setParentPath(String p)
	{
		parentPath = p;
	}
	
	/**
	 * Main crawl loop: enqueue the seeds, then BFS until the frontier is
	 * empty or the download budget is exhausted.
	 */
	public void run() {
		try
		{
			for(int i = 0; i < startURLs.length; i++)
			{
				queueURLs.add(startURLs[i]);
				isSearchedURLs.put(startURLs[i], ID++);
			}
			
			while(!queueURLs.isEmpty() && currentMemorySize <= maxMemorySize)
			{
				// poll() removes and returns the head in one step
				String u = queueURLs.poll();
				
				if(u != null)
				{
					parseURL(u);
				}
			}
		}
		catch(Exception e)
		{
			e.printStackTrace();
		}
	}
	
	/**
	 * Fetch and parse the URL, unless robots.txt forbids it.
	 * @param URL the URL to process
	 */
	void parseURL(String URL)
	{
		if(canBeSearched(URL))
		{
			parseHTML(URL);
		}
	}
	
	/**
	 * Look up the page ID assigned to a URL when it was enqueued.
	 * @param URL the URL to look up
	 * @return the page ID, or -1 if the URL was never registered
	 */
	int getID(String URL)
	{
		if(isSearchedURLs.containsKey(URL))
		{
			return isSearchedURLs.get(URL);
		}
		return -1;
	}
	
	/**
	 * Download and parse the page at the given URL, extract its title, links
	 * and content, and persist it via XKKFile.
	 * @param u the URL to fetch
	 */
	void parseHTML(String u)
	{
		// renamed from "ID" so it no longer shadows the static counter
		int pageID = getID(u);
		if(pageID == -1)
		{
			// Fix: the old code built a null page here and NPE'd on the very
			// first page.setConnection() call (and again in finally).
			return;
		}
		XKKPage page = new XKKPage(pageID, u);
		
		InputStream is = null;
		BufferedReader br = null;
		try {
			System.out.println(u);
			p = new Parser();
			p.setURL(u.trim());
			page.setConnection(true);
			NodeIterator nIterator = p.elements();
			if(u.startsWith("http://"))
			{
				page.setType("http");
			}
			else if(u.startsWith("ftp://"))
			{
				page.setType("ftp");
			}
			else if(u.startsWith("file://"))
			{
				page.setType("file");
			}
			try
			{
				while(nIterator.hasMoreNodes())
				{
					parseNode(nIterator.nextNode(), page);
				}
			}
			catch(EncodingChangeException ece)
			{
				// A meta tag changed the encoding mid-parse: reset the parser
				// and re-parse with a FRESH iterator (the old one is stale
				// after reset(); the original code wrongly reused it).
				p.reset();
				nIterator = p.elements();
				while(nIterator.hasMoreNodes())
				{
					parseNode(nIterator.nextNode(), page);
				}
			}
			// Re-download the raw page text with the encoding the parser settled on.
			URLConnection uc = new URL(u).openConnection();
			uc.connect();
			is = uc.getInputStream();
			br = new BufferedReader(new InputStreamReader(is, p.getEncoding()));
			StringBuilder content = new StringBuilder();
			String temp;
			while((temp = br.readLine()) != null)
			{
				content.append(temp);
			}
			currentMemorySize += content.length();
			page.setAllContent(content.toString());
		} 
		catch (ParserException e) {
			e.printStackTrace();
		} 
		catch (IOException e) {
			// covers ConnectException and SocketException as well
			e.printStackTrace();
		}
		catch(Exception e)
		{
			e.printStackTrace();
		}
		finally
		{
			// Fix: close the streams even when reading failed part-way.
			if(br != null)
			{
				try { br.close(); } catch (IOException ignored) { /* best effort */ }
			}
			if(is != null)
			{
				try { is.close(); } catch (IOException ignored) { /* best effort */ }
			}
			// Always persist whatever was collected, even after a failure.
			XKKFile file = new XKKFile(page);
			file.writeToFile(parentPath);
		}
	}
	
	/**
	 * Recursively process one parsed node: pick up the declared encoding from
	 * meta tags, the title, and outgoing links, then descend into children.
	 * @param n the node to process
	 * @param page the page record being populated
	 */
	private void parseNode(Node n, XKKPage page) {
		if(n instanceof Tag)
		{
			Tag t = (Tag)n;
			if(t instanceof MetaTag) //may declare the page encoding
			{
				try {
					String s = t.getAttribute("Content");
					if(s != null)
					{
						int index = s.toLowerCase().indexOf(CHARSET_STRING);
						if(index != -1)
						{
							int start = index + CHARSET_STRING.length();
							int end = start;
							while(end < s.length() && s.charAt(end) != ';')
							{
								end++;
							}
							// Fix: the old code sometimes included the trailing
							// ';' in the charset name when it was the last char.
							String charset = s.substring(start, end);
							p.setEncoding(charset);
							page.setEncoding(charset);
						}
					}
				} catch (Exception e) {
					e.printStackTrace();
				}
			}
			if(t instanceof TitleTag)
			{
				// Fix: guard against an empty <title></title>, whose child
				// list is non-null but whose first child is null.
				Node firstChild = n.getFirstChild();
				if(n.getChildren() != null && firstChild != null)
				{
					page.setTitle(firstChild.getText());
				}
				System.out.println(page.getTitle());
			}
			
			if(t instanceof LinkTag)
			{
				processLink((LinkTag)t, page);
			}
			
			NodeList nList = t.getChildren();
			if(nList != null)
			{
				NodeIterator nIterator = nList.elements();
				try {
					while(nIterator.hasMoreNodes())
					{
						parseNode(nIterator.nextNode(), page);
					}
				} catch (ParserException e) {
					e.printStackTrace();
				}
			}
		}
		// TextNode and Remark nodes carry no links or metadata we index; the
		// original debug-only handling was dead code and has been removed.
	}

	/**
	 * Normalize an extracted link, enqueue it if new and within budget, and
	 * record it as a child of the current page.
	 * @param t the link tag
	 * @param page the page the link was found on
	 */
	private void processLink(LinkTag t, XKKPage page) {
		String link = t.getLink();
		// Fix: null must be checked BEFORE trim()/compareTo() (old order NPE'd).
		if(link == null)
		{
			return;
		}
		link = link.trim();
		if(link.length() == 0)
		{
			return;
		}
		
		if(link.startsWith("www"))
		{
			link = "http://" + link;
		}
		else if(!link.startsWith("ftp") && !link.startsWith("http"))
		{
			// anything without a recognized scheme is treated as a local file
			link = "file://" + link;
		}
				
		if(!isSearched(link) && currentMemorySize < maxMemorySize)
		{
			queueURLs.add(link);
			isSearchedURLs.put(link, ID++);
		}
		
		int childID = getID(link);
		if(childID != -1 && !page.linkChildren.contains(childID))
		{
			page.linkChildren.add(childID);
		}
	}

	/**
	 * Check whether a URL has already been enqueued, treating "http://x" and
	 * "http://x/" as the same page.
	 * @param URL the URL to check
	 * @return true if the URL (or its slash-variant) was already registered
	 */
	boolean isSearched(String URL)
	{
		String anotherURL;
		
		if(URL.endsWith("/"))
		{
			anotherURL = URL.substring(0, URL.length() - 1);
		}
		else
		{
			anotherURL = URL + "/";
		}
		
		// An empty map trivially contains neither key, so no size check needed.
		return isSearchedURLs.containsKey(URL) || isSearchedURLs.containsKey(anotherURL);
	}
	
	/**
	 * Check whether robots.txt of the URL's host allows crawling it. The
	 * exclusion list is fetched once per host and cached (including the
	 * empty list, so hosts without robots.txt are not re-fetched).
	 * @param u the URL to check
	 * @return true if crawling is allowed (or the URL's host cannot be determined)
	 */
	boolean canBeSearched(String u)
	{
		try {
			URL url = new URL(u);
			String host = url.getHost();
			
			List<String> isNotAllowed = robotTxtCache.get(host);
			if(isNotAllowed == null)
			{
				isNotAllowed = fetchRobotsTxt(host);
				// Fix: cache even an empty/failed result so the host's
				// robots.txt is not re-fetched for every single URL.
				robotTxtCache.put(host, isNotAllowed);
			}
			for(Iterator<String> it = isNotAllowed.iterator(); it.hasNext();)
			{
				if(u.startsWith(it.next()))
				{
					return false;
				}
			}
		} catch (MalformedURLException e) {
			e.printStackTrace();
		}
		return true;
	}

	/**
	 * Download and parse http://host/robots.txt into a list of disallowed
	 * URL prefixes. Best-effort: failures yield the paths read so far.
	 * @param host the host to query
	 * @return disallowed URL prefixes (possibly empty, never null)
	 */
	private List<String> fetchRobotsTxt(String host)
	{
		List<String> isNotAllowed = new LinkedList<String>();
		BufferedReader br = null;
		try
		{
			URL robotTxtURL = new URL("http://" + host + "/robots.txt");
			URLConnection uc = robotTxtURL.openConnection();
			// Fix: the old code opened the input stream twice and leaked both.
			br = new BufferedReader(new InputStreamReader(uc.getInputStream()));
			String tempLine;
			while((tempLine = br.readLine()) != null)
			{
				if(tempLine.startsWith("Disallow:"))
				{
					String notAllowedPath = tempLine.substring("Disallow:".length());
					int hash = notAllowedPath.indexOf('#');
					if(hash != -1)  //strip the trailing comment
					{
						notAllowedPath = notAllowedPath.substring(0, hash);
					}
					notAllowedPath = notAllowedPath.trim();
					if(notAllowedPath.length() == 0)
					{
						// Fix: a bare "Disallow:" means everything is ALLOWED;
						// the old code turned it into a ban on the whole host.
						continue;
					}
					if(notAllowedPath.endsWith("/"))
					{
						notAllowedPath = notAllowedPath.substring(0, notAllowedPath.length() - 1);
					}
					notAllowedPath = "http://" + host + notAllowedPath;
					isNotAllowed.add(notAllowedPath);
					System.out.println("Disallow:" + notAllowedPath);
				}
			}
		}
		catch(Exception e)
		{
			// robots.txt missing or unreadable: crawl is allowed by default
		}
		finally
		{
			if(br != null)
			{
				try { br.close(); } catch (IOException ignored) { /* best effort */ }
			}
		}
		return isNotAllowed;
	}
}

