//Author:XING YIFAN A0105591J
import java.io.*;
import java.net.*;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

import org.jsoup.*;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Multi-threaded web crawler: reads base URLs from a user-supplied file,
 * estimates the RTT of each host via a raw HTTP GET, appends the results to
 * {@code CrawedHosts}, reports each successfully crawled URL to a local
 * collector service on port 9000, and follows links (via jsoup) up to a
 * configurable depth, stopping after a configurable number of errors.
 */
class WebCrawler {

	// File given by the user; each line starts with "<url> " (URL before first space).
	private String _filename = null;
	// Output file storing "<url> <estimated-rtt>" for every crawled host.
	private final String CRAWFILE = "CrawedHosts";
	// Hosts already visited. Vector is synchronized per-call, but the
	// check-then-add sequence must still be guarded as one unit (markVisited).
	private Vector<String> hostlist = new Vector<String>();
	private int depth_limit;  // maximum link depth to follow
	private int error_limit;  // cumulative error count that forces exit
	// AtomicInteger: crawler threads increment this concurrently.
	private final AtomicInteger current_error = new AtomicInteger(0);

	// Stream to the local collector service (127.0.0.1:9000).
	private DataOutputStream serverWriter = null;

	/**
	 * Connects to the collector service, truncates the result file, then
	 * launches one crawler thread per base URL read from {@code filename}.
	 *
	 * @param filename file whose lines begin with "<url> " (URL before the first space)
	 * @param l        link-depth limit
	 * @param e        cumulative error limit
	 * @throws IOException if the collector socket, input file, or result file cannot be opened
	 */
	public WebCrawler(String filename, int l, int e) throws IOException {
		Socket toServer = new Socket("127.0.0.1", 9000);
		serverWriter = new DataOutputStream(toServer.getOutputStream());

		_filename = filename;
		depth_limit = l;
		error_limit = e;

		// Truncate the result file so data from a previous run is discarded.
		new FileWriter(new File(CRAWFILE)).close();

		BufferedReader in = new BufferedReader(new FileReader(new File(_filename)));
		try {
			String url;
			while ((url = in.readLine()) != null) {
				// The URL is everything before the first space; a line without a
				// space is taken whole (the original threw StringIndexOutOfBounds
				// here, silently aborting the read loop).
				int sp = url.indexOf(' ');
				if (sp != -1) {
					url = url.substring(0, sp);
				}
				if (url.isEmpty()) {
					continue;
				}

				// Base URLs are at depth 1 (we pass the parent depth, 0).
				new Thread(new ParallelCrawler(url, 0, depth_limit)).start();
				Thread.sleep(50); // stagger thread start-up
			}
		} catch (InterruptedException ex) {
			Thread.currentThread().interrupt(); // preserve interrupt status
			System.out.println("there is an exception: " + ex.getMessage());
		} catch (Exception ex) {
			System.out.println("there is an exception: " + ex.getMessage());
		} finally {
			in.close();
		}
	}

	/** Reports a successfully crawled URL (newline-terminated) to the collector. */
	public void SendURL(String URL) {
		try {
			serverWriter.writeBytes(URL + "\n");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * True once the cumulative error count reaches the limit. Uses {@code >=}
	 * rather than {@code ==}: concurrent increments can skip past the exact
	 * limit value, which would have made the original check never fire.
	 */
	public boolean isErrorLimitAchieved() {
		return current_error.get() >= error_limit;
	}

	/** Terminates the whole crawler process. */
	public void ExitCrawSystem() {
		System.exit(1);
	}

	/**
	 * Atomically records {@code host} as visited.
	 *
	 * @return true if the host was new (caller should crawl it), false if seen before
	 */
	private synchronized boolean markVisited(String host) {
		if (hostlist.contains(host)) {
			return false;
		}
		hostlist.add(host);
		return true;
	}

	/** One crawl task: fetches a single URL and spawns children for its links. */
	private class ParallelCrawler implements Runnable {

		private String _originHost = null; // normalized URL, always scheme-prefixed
		private int current_depth;         // depth of this page (parent depth + 1)
		private int depth_l;               // maximum depth allowed

		/**
		 * @param h              URL to crawl; "http://" is prepended if no scheme given
		 * @param lastleveldepth depth of the page that linked here (0 for base URLs)
		 * @param maxdepth       depth limit for this crawl
		 */
		public ParallelCrawler(String h, int lastleveldepth, int maxdepth) {
			// Normalize to a scheme-prefixed form so later parsing is uniform.
			String lower = h.toLowerCase();
			if (lower.startsWith("http://") || lower.startsWith("https://")) {
				_originHost = h;
			} else {
				_originHost = "http://" + h;
			}

			current_depth = lastleveldepth + 1;
			depth_l = maxdepth;
		}

		/** True when depth {@code l} exceeds the configured limit. */
		public boolean IsDepthLimitAchieved(int l) {
			return (l > depth_l);
		}

		/** Linear equality search, equivalent to {@link Vector#contains}. */
		public boolean lookforhost(String h, Vector<String> v) {
			return v.contains(h);
		}

		/**
		 * Thread entry point: aborts the whole system if the error limit was
		 * hit, otherwise strips the scheme and crawls the host.
		 */
		public void run() {
			if (isErrorLimitAchieved()) {
				System.out.println("current error level has achieved specified error limit: "
						+ current_error.get() + " Force Exit");
				ExitCrawSystem();
				return;
			}

			if (_originHost == null) {
				System.out.println("The host is not specified(null)");
				return;
			}

			// The constructor guarantees a scheme prefix, so these substrings are safe.
			String myURL;
			if (_originHost.substring(0, 7).toLowerCase().equals("http://")) {
				myURL = _originHost.substring(7);
			} else {
				myURL = _originHost.substring(8);
			}

			startCraw(myURL);
		}

		/**
		 * Fetches {@code URL} (scheme-less "host[/path]") over a raw socket,
		 * records its RTT, reports it to the collector, and — if the depth
		 * limit allows — parses the page with jsoup and spawns one crawler
		 * thread per outgoing http(s) link.
		 */
		private void startCraw(String URL) {
			// Split "host/path" into the host and the request path.
			String Host, requestURL;
			int slash = URL.indexOf('/');
			if (slash != -1) {
				Host = URL.substring(0, slash);
				requestURL = URL.substring(slash);
			} else {
				Host = URL;
				requestURL = "/";
			}

			// Skip hosts that have already been crawled (atomic check-and-add;
			// the original's separate lookup + add could race between threads).
			if (!markVisited(Host)) {
				return;
			}

			// try-with-resources: the socket and its streams are closed on every
			// path; the original leaked them on several early returns.
			try (Socket s = new Socket(Host, 80);
					DataOutputStream outToServer = new DataOutputStream(s.getOutputStream());
					BufferedReader inFromServer =
							new BufferedReader(new InputStreamReader(s.getInputStream()))) {

				Thread.sleep(50); // pace requests slightly

				String request = "GET " + requestURL + " HTTP/1.1\r\nHost: " + Host + "\r\n";

				long sending_t = System.currentTimeMillis();
				outToServer.write((request + "\r\n").getBytes());

				// RTT is estimated as the time until the first response line arrives.
				String header = inFromServer.readLine();
				long rtt = System.currentTimeMillis() - sending_t;

				// Append "<url> <rtt>" to the shared result file; the writer is
				// closed on every path (the original opened it before the socket
				// and leaked it when the connection failed).
				try (FileWriter fw = new FileWriter(new File(CRAWFILE).getAbsoluteFile(), true)) {
					fw.write(_originHost + " " + rtt + "\n");
				}
				System.out.println("\nRTT estimated of " + _originHost + " estimated: " + rtt);

				if (header == null || header.length() == 0) {
					current_error.incrementAndGet(); // no response counts as an error
					System.out.println("None response");
					return;
				}

				// Status line looks like "HTTP/1.1 200 OK"; guard against a
				// malformed line with no status code (original threw AIOOBE).
				String split[] = header.split(" ");
				if (split.length > 1 && split[1].charAt(0) == '4') {
					current_error.incrementAndGet(); // 4xx status counts as an error
					System.out.println("Error status:" + split[1] + ": " + URL);
					return;
				} else if (split.length > 1 && split[1].charAt(0) == '3') {
					// 3xx: follow the Location header. A redirect stays at the SAME
					// logical level, so the child keeps this page's depth.
					while ((header = inFromServer.readLine()) != null && header.length() != 0) {
						if (header.startsWith("Location:")) {
							String redirect = header.substring(header.indexOf("Location: ") + 10);
							System.out.println("redirect: " + redirect);
							// start(), not run(): the original ran the child on this
							// thread, so nothing was ever actually parallel.
							new Thread(new ParallelCrawler(redirect, current_depth - 1, depth_l)).start();
							break;
						}
					}
					return; // if no Location header was found, stop here
				}

				SendURL(URL); // report the successfully crawled URL to the collector

				if (!IsDepthLimitAchieved(current_depth + 1)) {
					URL thisurl = new URL(_originHost);
					// 10 s fetch/parse budget; jsoup throws on timeout or HTTP error.
					Document doc = Jsoup.parse(thisurl, 10000);
					Elements links = doc.select("a[abs:href]");

					for (Element link : links) {
						String newURL = link.attr("abs:href");
						if (newURL.startsWith("http")) {
							// Children are one level deeper than this page; start()
							// actually launches the thread (original called run()).
							new Thread(new ParallelCrawler(newURL, current_depth, depth_l)).start();
							Thread.sleep(50); // throttle thread creation
						}
					}
				}

			} catch (SocketException e) {
				current_error.incrementAndGet(); // expired socket / reset connection
				System.out.println("Sorry, there is a Socket Exception: " + e.getMessage());
			} catch (UnknownHostException e) {
				current_error.incrementAndGet(); // unreachable host
				System.out.println("Sorry, there is an UnknownHostException: " + e.getMessage());
			} catch (IOException e) {
				System.out.println("Sorry, there is an IOException: " + e.getMessage());
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt(); // preserve interrupt status
				e.printStackTrace();
			}
		}

	}

	/**
	 * Prompts for three inputs: the file of base URLs, the link-depth limit,
	 * and the cumulative error limit, then starts the crawler.
	 */
	public static void main(String[] args) throws IOException {
		Scanner sc = new Scanner(System.in);
		System.out.println("Please specify the name of the file containing BaseURLs ");
		String filename = sc.nextLine();
		System.out.println("Please specify the link depth limit");
		int linkdepth = sc.nextInt();
		System.out.println("Please specify the Crawling Error limit");
		int errorlimit = sc.nextInt();
		new WebCrawler(filename, linkdepth, errorlimit);
		sc.close();
	}

};
