﻿using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Timers;
using KC.DataAccess.Global;

namespace KC.Business.Scheduler.ScheduledPlugins
{
	public class ArticleulatorSpider : ScheduledPluginBase
	{

		public override void Dispose()
		{
		}
		protected override void CloseInner()
		{
		}
		protected override void FailInner()
		{
		}
		protected async override void RunInner()
		{
			DataTable dt = await SQL.ExecuteDataTable(ConnStr, "select top 100 * from articleulator..links where result is null or result='' order by newid()");
			List<DataRow> rows = new List<DataRow>();
			foreach (DataRow dr in dt.Rows)
				rows.Add(dr);
			int done=0;
			Parallel.ForEach(rows, new ParallelOptions() { MaxDegreeOfParallelism=4 }, async row => {
				try { await SpiderLink(row); done++; }
				catch (Exception ex) { Logging.LogError(ex, ConnStr); } });
			Results += done.ToString() + " links spidered; ";
		}


		/// <summary>Spiders a link record from Articulator.dbo.Links</summary>
		/// <param name="dr">DataRow from from Articulator.dbo.Links</param>
		public async Task SpiderLink(DataRow dr)
		{
			if (dr == null) throw new ArgumentNullException("dr");
			// Overhead & Ensure valid URL
			string url = dr["ToUrl"].ToString();
			if (string.IsNullOrEmpty(url)) return;

			if (isUrlBanned(url)) {
				await SQL.ExecuteNonQuery(ConnStr, "update links set result='Banned' where tourl='" + url.Replace("'", "''") + "'");
				return; }

			// Download page content & save HTML into SQL
			string doctxt;
			try { doctxt = new WebClient().DownloadString(url); }
			catch (WebException) { SQL.ExecuteNonQuery(ConnStr, "update links set result='Download Error' where tourl='" + url.Replace("'", "''") + "'"); return; }
			if (doctxt.Length > MaxPageSize) doctxt = doctxt.Substring(0, MaxPageSize);
			await SQL.ExecuteNonQuery(ConnStr, "if (select count(ID) from downloadedpages where url='" + url.Replace("'", "''") + "') = 0 begin \r\n" +
				"insert into DownloadedPages (URL, DocText, Added, SeedQuery) values ('" + url.Replace("'", "''") + "', '" + doctxt.Replace("'", "''") + "', getdate(), ''); end");
			List<string> links = KC.Library.Net.Web.GetLinksFromHtml(doctxt);

			// Insert all links in downloaded HTML
			await SQL.ExecuteNonQuery(ConnStr, "update links set result='Spidered' where tourl='" + url.Replace("'", "''") + "'");
			Parallel.ForEach(links, new ParallelOptions() { MaxDegreeOfParallelism = 2 }, async tourl => {
				try {
					string tourl1 = tourl.Trim().TrimStart(new char[] { '.', '/', '\\' });
					if (string.IsNullOrEmpty(tourl1) || !tourl1.Contains("://") || !tourl1.Contains(".") || (tourl.ToLower().Contains("wiki") && !tourl.Contains("en.")) || tourl.ToLower().Contains("ads.") || tourl.ToLower().Contains("media") || tourl.ToLower().Contains("harvard") || tourl1.ToLower().Contains("sitemap") || isUrlBanned(tourl1))
						return;
					Uri uri;
					if (Uri.TryCreate(tourl, UriKind.Absolute, out uri))
						await SQL.ExecuteNonQuery(ConnStr,
							"if (select count(ID) from links with (nolock) where fromurl='" + url.Replace("'", "''") + "' and tourl='" + uri.AbsoluteUri.Replace("'", "''") + "') = 0 \nbegin \r\n " +
							"insert into links (fromurl, tourl, [date], result) values ('" + url.Replace("'", "''") + "','" + uri.AbsoluteUri.Replace("'", "''") + "', getdate(), '') \r\n" +
							"end"); }
				catch { } });
			await SQL.ExecuteNonQuery(ConnStr, "update links set result='Spidered' where tourl='" + url.Replace("'", "''") + "'");
		}
		public async Task<List<DataRow>> GetLinksToSpider()
		{
			DataTable dtLinks = await SQL.ExecuteDataTable(ConnStr, "select top 2 * from links where result is null or len(result)=0");
			List<DataRow> rows = new List<DataRow>();
			foreach (DataRow dr in dtLinks.Rows)
				rows.Add(dr);
			return rows;
		}

		private const int MaxPageSize = 65536;

		/// <summary>If true, a URL will not be spidered and downloaded.</summary>
		/// <param name="url">URL to check</param>
		/// <returns>true = Do Not Spider / Download; false = Go Nuts</returns>
		public static bool isUrlBanned(string url)
		{
			if (string.IsNullOrEmpty(url)) throw new ArgumentNullException("url");
			url = url.ToLower().Trim();
			return url.Contains("doi.org")
				|| url.Contains("comcast")
				|| url.Contains(".gov")
				|| url.Contains(".pbs")
				|| url.Contains(".mil")
				|| url.Contains("wiley.com")
				|| (!url.StartsWith("http://") && !url.StartsWith("https://"))
				|| url.Contains("cambridge.")
				|| url.Contains("statcounter.com")
				|| url.Contains("newyorker.com")
				|| url.Contains("google.")
				|| url.Contains("bing.")
				|| url.Contains("@")
				|| url.Contains("yahoo.")
				|| url.Contains("aol.com")
				|| url.Contains("ask.com")
				|| url.Contains("faq.archive.org")
				|| url.Contains(".web.archive.org")
				|| url.Contains(".pdf")
				|| url.Contains(".doc")
				|| url.Contains(".ppt")
				|| url.Contains(".xls")
				|| url.Contains(".ps")
				|| url.Contains(".wmv")
				|| url.Contains("mediawiki.org")
				|| url.Contains("doubleclick")
				|| url.Contains("ads.")
				;
		}
	}
}
