package crawler;

import java.io.*;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Fetches a single web page over HTTP and buffers its content line by line
 * in memory.
 *
 * <p>Usage: construct with a URL string (this opens the connection and reads
 * the HTTP status code), call {@link #readWebPage()} to download the body,
 * then retrieve the lines via {@link #getWebPageContents()}.
 *
 * <p>Not thread-safe: all state is mutable instance state with no
 * synchronization.
 */
public class webGetter {

	private URL webURL;                          // target page
	private URLConnection conn;                  // connection opened in the constructor
	private InputStream inStream;                // response body; null when status >= 400
	private BufferedReader pageRead;             // reader over inStream during readWebPage()
	private int responseCode;                    // HTTP status of the initial request
	private LinkedList<String> webPageContents;  // downloaded lines; null when fetch failed

	/**
	 * Opens a connection to {@code url} and records its HTTP response code.
	 * On a status below 400 the response body stream is kept open for a later
	 * {@link #readWebPage()} call; otherwise the stream is left null.
	 *
	 * @param url the page address, e.g. {@code "http://example.com/"}
	 * @throws Exception on malformed URLs or connection failures
	 */
	public webGetter(String url) throws Exception {
		webURL = new URL(url);
		conn = webURL.openConnection();
		// NOTE(review): this cast assumes an http/https URL; a non-HTTP scheme
		// (e.g. file:) would throw ClassCastException here — confirm callers
		// only pass HTTP URLs.
		responseCode = ((HttpURLConnection) this.conn).getResponseCode();
		if (responseCode < 400) {
			// Reuse the connection that is already open. The original called
			// webURL.openStream(), which issues a SECOND request for the same
			// resource and wastes a round trip.
			inStream = conn.getInputStream();
		} else {
			inStream = null;
		}
		webPageContents = new LinkedList<String>();
	}

	/**
	 * Downloads the whole response body, storing each line in
	 * {@code webPageContents}. If the server answered with an error status
	 * (no body stream), {@code webPageContents} is set to null instead.
	 *
	 * @throws Exception if reading from the stream fails
	 */
	public void readWebPage() throws Exception {
		if (inStream == null) {
			webPageContents = null;
			return;
		}
		// try-with-resources guarantees the reader (and the underlying stream)
		// is closed even when readLine() throws; the original closed only on
		// the happy path and leaked the stream on exception. UTF-8 is pinned
		// explicitly so decoding no longer depends on the platform default.
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(this.inStream, StandardCharsets.UTF_8))) {
			this.pageRead = reader;
			String line;
			while ((line = reader.readLine()) != null) {
				this.webPageContents.add(line);
			}
		}
	}

	/**
	 * @return the downloaded lines, or null when the fetch failed
	 *         (HTTP status >= 400 and readWebPage() was called)
	 */
	public LinkedList<String> getWebPageContents() {
		return this.webPageContents;
	}

	/** @return the HTTP status code recorded when the connection was opened */
	public int getResponseCode() {
		return this.responseCode;
	}

	/** @return the underlying connection opened by the constructor */
	public URLConnection getURLConnection() {
		return this.conn;
	}

	/**
	 * Prints every downloaded line to standard out, or an explanatory message
	 * when no content was retrieved.
	 */
	public void printWebPageContents() {
		if (this.webPageContents != null) {
			for (String line : this.webPageContents) {
				System.out.println(line);
			}
		} else {
			System.out.println("No content could be appropriated from the server for this webpage. (" + this.webURL + ")");
		}
	}

}