using System;
using System.Collections;
using System.Configuration;
using System.IO;
using System.Net;
using System.Text.RegularExpressions;
using System.Web;
using SecuBat.Crawling.Forms;
using Css.Diagnostics;
using SecuBat.Logging;

namespace SecuBat.Crawling
{
	/// <summary>
	/// Represents a single crawled web page. Downloads the page content over HTTP,
	/// extracts outgoing anchor links and HTML forms from the markup, and records
	/// the crawl results through <c>CrawlDbManager</c> / <c>LogManager</c>.
	/// </summary>
	public class WebPage
	{
		// Hoisted to static readonly: constructing a RegexOptions.Compiled regex is
		// expensive (it emits IL), so the original code paid that cost once per page.
		// Building each regex once per process preserves behavior and removes the
		// repeated compilation overhead. Pattern strings are unchanged.
		// TODO: add other links too (frames, meta refresh, image maps...)
		private static readonly Regex HrefRegex = new Regex(
			"<\\s*?a\\s[^>]*?href\\s*?=\\s*?([\\\"']?)([^\\1]*?)\\1(>|\\s.*?>)",
			RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline);

		private static readonly Regex FormRegex = new Regex(
			"(<\\s*?form(>|([^>]*?)>))(.*?)<\\s*?/\\s*?form\\s*?>",
			RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline);

		private WebAddress _address;
		private WebAddress _responseAddress;
		private String _htmlContent;
		private bool _isProcessed;
		private HttpStatusCode _statusCode;
		private WebAddressCollection _links;
		private WebFormCollection _forms;
		private int _pageId;    // database id assigned by CrawlDbManager.NewWebPage; -1 until registered
		private int _runId;     // id of the crawl run this page belongs to

		/// <summary>Raw HTML of the page as downloaded by <see cref="ProcessPage"/>; empty until retrieved.</summary>
		public string HtmlContent
		{
			get { return _htmlContent; }
			set { _htmlContent = value; }
		}

		/// <summary>HTTP status code of the last response.</summary>
		public HttpStatusCode StatusCode
		{
			get { return _statusCode; }
			set { _statusCode = value; }
		}

		/// <summary>True once <see cref="ProcessPage"/> has run (set even when retrieval failed).</summary>
		public bool IsProcessed
		{
			get { return _isProcessed; }
			set { _isProcessed = value; }
		}

		/// <summary>Outgoing links extracted from the page's anchor tags.</summary>
		public WebAddressCollection Links
		{
			get { return _links; }
			set { _links = value; }
		}

		/// <summary>The address that was requested.</summary>
		public WebAddress Address
		{
			get { return _address; }
			set { _address = value; }
		}

		/// <summary>
		/// The address the response actually came from (after redirects);
		/// null until a response has been received or if it could not be parsed.
		/// </summary>
		public WebAddress ResponseAddress
		{
			get { return _responseAddress; }
			set { _responseAddress = value; }
		}

		/// <summary>HTML forms extracted from the page that have not been attacked yet.</summary>
		public WebFormCollection Forms
		{
			get { return _forms; }
			set { _forms = value; }
		}

		/// <summary>
		/// Creates an unprocessed page for the given address within the given crawl run.
		/// </summary>
		/// <param name="address">Address of the page to crawl.</param>
		/// <param name="runId">Id of the crawl run this page belongs to.</param>
		public WebPage(WebAddress address, int runId)
		{
			Address = address;
			ResponseAddress = null;
			_htmlContent = "";
			IsProcessed = false;
			Links = new WebAddressCollection();
			Forms = new WebFormCollection();
			_pageId = -1;
			_runId = runId;
		}

		/// <summary>
		/// Downloads the page, logs it to the crawl database, and extracts links and
		/// forms. Any failure is logged and swallowed; <see cref="IsProcessed"/> is set
		/// regardless of success so the crawler never re-queues the page.
		/// </summary>
		/// <param name="formsAlreadyAttacked">Form identifiers that were already attacked; matching forms are skipped.</param>
		/// <param name="actualDepth">Crawl depth of this page, recorded with the database entry.</param>
		public void ProcessPage(Hashtable formsAlreadyAttacked, int actualDepth)
		{
			try
			{
				DateTime crawlTime = DateTime.Now;
				this.GetResponse();
				int duration = (int) DateTime.Now.Subtract(crawlTime).TotalMilliseconds;

				// Log crawled page
				String referer = null;
				if (this.Address.Referer != null)
					referer = this.Address.Referer.Url;
				_pageId = CrawlDbManager.NewWebPage(_runId, this.Address.Url, referer, actualDepth, DateTime.Now, duration);

				this.GetLinks();
				this.GetForms(formsAlreadyAttacked);
			}
			catch (Exception ex)
			{
				LogManager.Write(TraceLevel.Error, ex, "Web Page processing");
			}
			IsProcessed = true;
		}

		/// <summary>
		/// Performs the HTTP request for <see cref="Address"/>, filling
		/// <see cref="HtmlContent"/>, <see cref="StatusCode"/> and
		/// <see cref="ResponseAddress"/>. Failures are logged, not rethrown.
		/// </summary>
		private void GetResponse()
		{
			Stream inStream = null;
			HttpWebRequest httpRequest = null;
			HttpWebResponse httpResponse = null;
			try
			{
				httpRequest = (HttpWebRequest) WebRequest.Create(Address.Url);
				httpRequest.KeepAlive = false;

				// Robustness fix: the original used Int32.Parse directly, so a missing
				// or malformed "WebRequestTimeout" app setting threw here and made every
				// retrieval fail. Fall back to the HttpWebRequest default timeout instead.
				String timeoutSetting = ConfigurationSettings.AppSettings["WebRequestTimeout"];
				int timeout;
				if (timeoutSetting != null && Int32.TryParse(timeoutSetting, out timeout))
					httpRequest.Timeout = timeout;

				if (Address.Referer != null)
					httpRequest.Referer = Address.Referer.Url;
				httpResponse = (HttpWebResponse) httpRequest.GetResponse();
				StatusCode = httpResponse.StatusCode;
				inStream = httpResponse.GetResponseStream();

				// TODO: Sometimes hangs!
				using (StreamReader reader = new StreamReader(inStream))
					HtmlContent = reader.ReadToEnd();

				try
				{
					// The response URI may differ from the requested one after redirects.
					ResponseAddress = new WebAddress(httpResponse.ResponseUri.AbsoluteUri);
				}
				catch (InvalidWebAddressException e)
				{
					// Keep ResponseAddress null; link resolution falls back to Address.
					LogManager.Write(TraceLevel.Warning, e, "Web Page Retrieval");
				}

				LogManager.Write(TraceLevel.Information, "Retrieval Success (" + _statusCode + "): " + Address.Url, "Web Page Retrieval");

			}
			catch (Exception ex)
			{
				LogManager.Write(TraceLevel.Error, "Retrieval Failed (" + _statusCode + "): " + Address.Url + " - " + ex, "Web Page Retrieval");
			}
			finally
			{
				if (inStream != null)
					inStream.Close();

				if (httpResponse != null)
					httpResponse.Close();
			}
		}

		/// <summary>
		/// Extracts anchor hrefs from <see cref="HtmlContent"/> and adds them to
		/// <see cref="Links"/>, resolving relative URLs against the redirect target
		/// when one is known, otherwise against the requested address.
		/// </summary>
		private void GetLinks()
		{
			MatchCollection col = HrefRegex.Matches(_htmlContent);
			foreach (Match m in col)
			{
				try
				{
					String url = HttpUtility.HtmlDecode(m.Groups[2].Value);

					// Prefer the post-redirect base address so relative links resolve correctly.
					if (ResponseAddress != null)
						this._links.Add(new WebAddress(url, this.ResponseAddress));
					else
						this._links.Add(new WebAddress(url, this.Address));
				}
				catch// (InvalidWebAddressException e)
				{
					// Deliberate best-effort: malformed hrefs (javascript:, mailto:,
					// garbage) are common in the wild and are simply skipped.
					//LogManager.Write(TraceLevel.Information, e, "Link analysis");
				}
			}
		}

		/// <summary>
		/// Extracts HTML forms from <see cref="HtmlContent"/>, registers each new one
		/// with the crawl database, and adds it to <see cref="Forms"/>. Forms whose
		/// identifier appears in <paramref name="formsAlreadyAttacked"/> are skipped.
		/// </summary>
		/// <param name="formsAlreadyAttacked">Identifiers of forms that were already attacked.</param>
		private void GetForms(Hashtable formsAlreadyAttacked)
		{
			MatchCollection col = FormRegex.Matches(_htmlContent);
			for (int i = 0; i < col.Count; i++)
			{
				try
				{
					Match m = col[i];
					WebForm f = new WebForm(_pageId, m.Value, m.Groups[1].Value, this.Address, i);

					// Only add not already attacked forms for gaining efficiency 
					if (!formsAlreadyAttacked.Contains(f.FormIdentifier))
					{
						// Log the found form
						f.Register();
						Forms.Add(f);
					}
					else
						LogManager.Write(TraceLevel.Verbose, String.Format("Ignoring form with action address \"{0}\" because it has been attacked already.", f.ActionAddress.Url), "Form extraction");
				}
				catch (Exception ex)
				{
					LogManager.Write(TraceLevel.Warning, ex, "Form extraction");
				}
			}			
		}
	}
}
