﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using System.Threading;
using System.Threading.Tasks;
//using System.Web;
using System.Net;
using System.IO;
using System.Text.RegularExpressions;

namespace teavch.Util.SitemapWriter
{

	/// <summary>Handles the <see cref="Crawler.CrawlingCompleted"/> event, raised when an asynchronous crawl finishes.</summary>
	public delegate void CrawlingCompletedEventHandler( object sender, CrawlingCompletedEventArgs e );
	/// <summary>Handles the <see cref="Crawler.CrawlingStatusChanged"/> event, raised with crawl progress messages.</summary>
	public delegate void CrawlingStatusChangedEventHandler( object sender, CrawlingStatusChangedEventArgs e );

	/// <summary>
	/// Event data for <see cref="Crawler.CrawlingCompleted"/>: carries the sitemap built by the crawl.
	/// </summary>
	public class CrawlingCompletedEventArgs : EventArgs
	{
		/// <summary>The sitemap produced by the completed crawl.</summary>
		public Sitemap Sitemap { get; set; }
	}

	/// <summary>
	/// Event data for <see cref="Crawler.CrawlingStatusChanged"/>: carries a human-readable progress message.
	/// </summary>
	public class CrawlingStatusChangedEventArgs : EventArgs
	{
		/// <summary>Free-form status text describing the current crawl step.</summary>
		public string StatusMessage { get; set; }
	}

	/// <summary>
	/// Crawls a website starting from a given url, following relative anchor links,
	/// collecting page and image urls, and producing a <see cref="Sitemap"/> object.
	/// Use <see cref="Crawl"/> for a synchronous crawl or <see cref="CrawlAsync"/> plus the
	/// <see cref="CrawlingCompleted"/> event for a background crawl.
	/// </summary>
	public class Crawler
	{

		/// <summary>Separator that joins "pageUrl" and "imageUrl" in the internal image list.</summary>
		private const string ImageUrlSeparator = "#teavch#";

		// Patterns compiled once: the originals were rebuilt on every page (and the image
		// pattern once per outgoing link), re-matching identical HTML repeatedly.
		/// <summary>Matches anchor tags and captures the double-quoted href value.</summary>
		private static readonly Regex AnchorHrefRegex = new Regex( "<a [^>]*href=(?:\"(?<href>.*?)\")", RegexOptions.IgnoreCase | RegexOptions.Compiled );

		/// <summary>Matches img tags and captures the double-quoted src value.</summary>
		private static readonly Regex ImageSrcRegex = new Regex( "<img [^>]*src=(?:\"(?<src>.*?)\")", RegexOptions.IgnoreCase | RegexOptions.Compiled );

		/// <summary>File extensions identifying non-page resources, never added as sitemap urls (case-sensitive, as in the original).</summary>
		private static readonly string[] NonPageExtensions = new string[]
		{
			".png", ".jpg", ".gif", ".doc", ".docx", ".xls", ".xlsx", ".pdf", ".jpeg", ".flv", ".mpp"
		};

		/// <summary>File extensions accepted for sitemap image entries.</summary>
		private static readonly string[] ImageExtensions = new string[] { ".png", ".jpg", ".gif" };

		// Stored by the (sse, options) constructor; not read by the visible code.
		private SupportedSearchEngine _searchEngine = SupportedSearchEngine.None;

		/// <summary>Controls which optional tags are emitted for each sitemap url.</summary>
		public SitemapAttributeIncludeOptions IncludeOptions { get; set; }

		// De-duplicated page urls and "pageUrl#teavch#imageUrl" pairs collected while crawling.
		private List<string> Urls = null;
		private List<string> ImageUrls = null;

		/// <summary>Root url of the site being crawled; set by <see cref="Crawl"/>.</summary>
		public string WebsiteUrl = "";

		/// <summary>
		/// Provide relative urls to be excluded from crawling...
		/// </summary>
		public List<string> ExcludeUrlsContaining { get; set; }

		/// <summary>Substrings that exclude an image url from the sitemap.</summary>
		public List<string> ExcludeImageUrlsContaining { get; set; }

		/// <summary>Raised when an asynchronous crawl (<see cref="CrawlAsync"/>) finishes.</summary>
		public event CrawlingCompletedEventHandler CrawlingCompleted;

		/// <summary>Raised with progress messages while crawling.</summary>
		public event CrawlingStatusChangedEventHandler CrawlingStatusChanged;

		/// <summary>
		/// Creates a crawler with image collection enabled and the optional
		/// last-modified / change-frequency / priority tags disabled.
		/// </summary>
		public Crawler()
		{
			this.Urls = new List<string>();
			this.ImageUrls = new List<string>();
			this.ExcludeUrlsContaining = new List<string>();
			// BUG FIX: the original guarded this assignment with "ExcludeUrlsContaining == null"
			// (always false at this point), so ExcludeImageUrlsContaining was never created and
			// isValidImageToAdd threw NullReferenceException.
			this.ExcludeImageUrlsContaining = new List<string>();

			this.IncludeOptions = new SitemapAttributeIncludeOptions();
			this.IncludeOptions.IncludeImage = true;
			this.IncludeOptions.IncludeLastModifiedTag = false;
			this.IncludeOptions.IncludeChangeFrequencyTag = false;
			this.IncludeOptions.IncludePriorityTag = false;
		}

		/// <summary>
		/// Creates a crawler for a specific search engine with caller-supplied include options
		/// (replacing the defaults set by the parameterless constructor).
		/// </summary>
		public Crawler( SupportedSearchEngine sse, SitemapAttributeIncludeOptions _includeOptions )
			: this()
		{
			this._searchEngine = sse;
			this.IncludeOptions = _includeOptions;
		}

		/// <summary>
		/// Crawls synchronously and returns the resulting sitemap.
		/// </summary>
		/// <param name="urlAddress">Address which you want to start crawling and generate sitemap for.</param>
		/// <param name="urlSiteRoot">This attribute will be used to prepare the complete URL of the site for your sitemap.</param>
		/// <returns>The sitemap built from all crawled urls.</returns>
		public Sitemap Crawl( string urlAddress, string urlSiteRoot )
		{
			this.WebsiteUrl = urlSiteRoot;
			this.CrawlNow( urlAddress, urlSiteRoot );
			return this.getSiteMap();
		}

		// Thread entry point for CrawlAsync: expects a string[2] of { urlAddress, urlSiteRoot }.
		private void CrawlNow( object o )
		{
			string[] args = o as string[];
			// Null check added: the original dereferenced the cast result unconditionally.
			if ( args != null && args.Length == 2 )
			{
				this.CrawlNow( args[ 0 ], args[ 1 ] );
				Sitemap sitemap = this.getSiteMap();
				this.RaiseCompleted( sitemap );
			}
		}

		// Fetches one page, records its images, then recursively follows its relative links.
		// Best-effort: any failure (network, parse, bad encoding) skips the page silently,
		// preserving the original's swallow-and-continue behavior.
		private void CrawlNow( string urlAddress, string urlSiteRoot )
		{
			if ( urlSiteRoot.EndsWith( "/" ) )
				urlSiteRoot = urlSiteRoot.Substring( 0, urlSiteRoot.Length - 1 );

			try
			{
				HttpWebRequest request = ( HttpWebRequest ) WebRequest.Create( urlAddress );
				this.RaiseStatusChanged( "Creating request for " + urlAddress );

				// using blocks added: the original leaked the response/reader on any exception.
				using ( HttpWebResponse response = ( HttpWebResponse ) request.GetResponse() )
				{
					if ( response.StatusCode != HttpStatusCode.OK )
						return;

					string data;
					using ( Stream receiveStream = response.GetResponseStream() )
					using ( StreamReader readStream = response.CharacterSet == null
								? new StreamReader( receiveStream )
								: new StreamReader( receiveStream, Encoding.GetEncoding( response.CharacterSet ) ) )
					{
						data = readStream.ReadToEnd();
					}

					this.RaiseStatusChanged( "Parsing linked urls at " + urlAddress );

					// Scan this page's images once, up front. The original re-ran the image
					// regex over the same HTML for every outgoing link; dedup made the extra
					// passes pure waste.
					this.CollectImages( data, urlAddress, urlSiteRoot );

					var urls = AnchorHrefRegex.Matches( data ).OfType<Match>().Select( m => m.Groups[ "href" ].Value );
					foreach ( string url in urls )
					{
						if ( !this.shouldProcess( url ) )
							continue;

						// Normalize to an absolute url under the site root, flattening "../" hops.
						string completeUrl = urlSiteRoot + url.Replace( urlSiteRoot, "" );
						completeUrl = completeUrl.Replace( "../../", "../" ).Replace( "../", "/" );

						if ( this.Urls.Contains( completeUrl ) )
							continue;

						if ( this.isValidUrlToAdd( completeUrl ) )
						{
							this.Urls.Add( completeUrl );
							this.RaiseStatusChanged( "Crawling..." + completeUrl );
							// Recursion terminates because each page is added to Urls before descending.
							this.CrawlNow( completeUrl, urlSiteRoot );
						}
					}
				}
			}
			catch ( Exception )
			{
				// Deliberate best-effort swallow (matches original): one bad page must not abort the crawl.
				return;
			}
		}

		// Extracts every <img src="..."> from a page and records it as "pageUrl#teavch#imageUrl",
		// skipping duplicates, non-image extensions and excluded substrings.
		private void CollectImages( string data, string urlAddress, string urlSiteRoot )
		{
			var imgs = ImageSrcRegex.Matches( data ).OfType<Match>().Select( m => m.Groups[ "src" ].Value );
			foreach ( string img in imgs )
			{
				string completeImageUrl = img.Replace( "../../", "../" ).Replace( "../", "/" );
				completeImageUrl = urlSiteRoot + completeImageUrl.Replace( urlSiteRoot, "" );
				completeImageUrl = urlAddress + ImageUrlSeparator + completeImageUrl;

				if ( !this.ImageUrls.Contains( completeImageUrl ) && this.isValidImageToAdd( completeImageUrl ) )
					this.ImageUrls.Add( completeImageUrl );
			}
		}

		/// <summary>
		/// Starts a crawl on a background thread; subscribe to <see cref="CrawlingCompleted"/>
		/// to receive the finished sitemap.
		/// </summary>
		/// <param name="urlAddress">Address to start crawling from.</param>
		/// <param name="urlSiteRoot">Site root used to build absolute urls.</param>
		public void CrawlAsync( string urlAddress, string urlSiteRoot )
		{
			string[] args = new string[] { urlAddress, urlSiteRoot };
			Thread thread = new Thread( new ParameterizedThreadStart( CrawlNow ) );
			thread.Start( args );
		}

		// A link is followed only when it is a relative, non-anchor, non-"/images/" url
		// that contains none of the configured exclusion substrings.
		// (Rewritten with early returns; the original was a hard-to-read double negation.)
		private bool shouldProcess( string url )
		{
			string trimmed = url.Trim();

			if ( trimmed.StartsWith( "#" ) || trimmed == "/" )
				return false;

			// Absolute links leave the site (or re-enter it redundantly) — skip them.
			if ( url.StartsWith( "http://" ) || url.StartsWith( "https://" ) )
				return false;

			if ( url.Contains( "/images/" ) )
				return false;

			foreach ( string urlExclude in this.ExcludeUrlsContaining )
			{
				if ( url.Contains( urlExclude ) )
					return false;
			}

			return true;
		}

		// Pages only: urls ending in a known binary/document extension are never sitemap entries.
		private bool isValidUrlToAdd( string url )
		{
			return !NonPageExtensions.Any( ext => url.EndsWith( ext ) );
		}

		// Accepts only urls ending in a known image extension, minus configured exclusions.
		private bool isValidImageToAdd( string url )
		{
			if ( !ImageExtensions.Any( ext => url.EndsWith( ext ) ) )
				return false;

			foreach ( string urlExclude in this.ExcludeImageUrlsContaining )
			{
				if ( url.Contains( urlExclude ) )
					return false;
			}

			return true;
		}

		// Null-safe event raisers: the originals invoked the events directly and threw
		// NullReferenceException whenever no handler was attached.
		private void RaiseStatusChanged( string message )
		{
			CrawlingStatusChangedEventHandler handler = this.CrawlingStatusChanged;
			if ( handler != null )
				handler( this, new CrawlingStatusChangedEventArgs() { StatusMessage = message } );
		}

		private void RaiseCompleted( Sitemap sitemap )
		{
			CrawlingCompletedEventHandler handler = this.CrawlingCompleted;
			if ( handler != null )
				handler( this, new CrawlingCompletedEventArgs() { Sitemap = sitemap } );
		}

		/// <summary>
		/// Builds a <see cref="Sitemap"/> object from the urls and images collected so far,
		/// attaching each page's images via the "pageUrl#teavch#imageUrl" pairing.
		/// </summary>
		private Sitemap getSiteMap()
		{
			this.Urls.Sort();
			Sitemap sitemap = new Sitemap();
			sitemap.SitemapUrlSet = new SitemapUrlSet();
			sitemap.SitemapUrlSet.SitemapUrls = new SitemapUrls<SitemapUrl>();
			sitemap.IncludeOptions = this.IncludeOptions;

			this.RaiseStatusChanged( "Total " + this.Urls.Count.ToString() + " urls crawled. Preparing Sitemap object..." );

			foreach ( string url in this.Urls )
			{
				SitemapUrl sitemapUrl = new SitemapUrl();
				sitemapUrl.Location = new Location( url );
				sitemapUrl.ChangeFrequency = new ChangeFrequency( "Monthly" );
				// NOTE(review): ToShortDateString is culture-dependent; sitemaps normally use W3C
				// dates (yyyy-MM-dd) — kept as-is to preserve output, but worth confirming.
				sitemapUrl.LastModified = new LastModified( DateTime.Now.ToShortDateString() );
				sitemapUrl.Priority = new Priority( "0.8" );
				sitemapUrl.ImageImage = new ImageImage( "" );

				// Attach every image that was recorded against this page.
				foreach ( string imageUrl in this.ImageUrls )
				{
					if ( imageUrl.StartsWith( url + ImageUrlSeparator ) )
					{
						string[] imgUrl = imageUrl.Split( new string[] { ImageUrlSeparator }, StringSplitOptions.RemoveEmptyEntries );
						sitemapUrl.ImageImage.Location.Add( new Location() { Value = imgUrl[ 1 ], XmlTag = "loc", XmlTagPrefix = "image" } );
					}
				}

				sitemapUrl.Title = new Title( "" );
				sitemapUrl.VideoVideo = new VideoVideo( "" );
				sitemapUrl.Description = new Description( "" );
				sitemap.SitemapUrlSet.SitemapUrls.Add( sitemapUrl );
			}

			this.RaiseStatusChanged( "Total " + this.Urls.Count.ToString() + " urls crawled. Writing xml file..." );

			return sitemap;
		}

	}

}