﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Common;
using System.Threading;
using System.Xml;
using System.Xml.XPath;
using LuaInterface;
using System.IO;
using System.Text.RegularExpressions;
using System.Diagnostics;
using MySql.Data.MySqlClient;

namespace SpiderServer
{
	public class Job
	{
		// Queue of links waiting to be fetched (named "list_link_wait{jobId}" in the ctor).
		MessageQueue<LinkInfo> queueWait;
		// Queue of fetched pages waiting for the Analyze worker ("list_link_analyze{jobId}").
		MessageQueue<PageDataInfo> queueAnalyze;
		// Hash codes of URLs currently enqueued, used to avoid duplicate enqueues.
		MessageSet<int> setWait;
		// Hash codes of URLs that have already been crawled.
		MessageSet<int> setCrawled;
		// True while the Analyze background thread should keep running.
		private bool isRun = false;
		// Id of the crawl job this instance services.
		private int jobId = 0;

		// Background thread executing Analyze(); created in Start().
		private Thread analyzeThread;

		// Re-reads the job configuration on every access — presumably so runtime
		// config changes are picked up; TODO confirm Configs caching behavior.
		private CrawlJobInfo JobInfo
		{
			get
			{
				return Configs.GetCrawlJobById(this.jobId);
			}
		}

		/// <summary>
		/// Binds this instance to a crawl job id and wires up the job-scoped
		/// queues and de-duplication sets, whose names embed the job id.
		/// </summary>
		public Job(int jobId)
		{
			this.jobId = jobId;
			string suffix = jobId.ToString();
			this.queueWait = new MessageQueue<LinkInfo>("list_link_wait" + suffix);
			this.queueAnalyze = new MessageQueue<PageDataInfo>("list_link_analyze" + suffix);
			this.setWait = new MessageSet<int>("set_link_wait" + suffix);
			this.setCrawled = new MessageSet<int>("set_link_crawled" + suffix);
		}

		/// <summary>
		/// Strips HTML markup from a string: removes script blocks and tags,
		/// decodes common HTML entities, and collapses line-break whitespace.
		/// Exposed to Lua scripts as "clearHtmlTags".
		/// </summary>
		/// <param name="HTML">Raw HTML fragment; null/empty is returned unchanged.</param>
		/// <returns>The plain-text content.</returns>
		public string ClearHtmlTags(string HTML)
		{
			// Guard: the original threw on null input inside Regex.Replace.
			if (string.IsNullOrEmpty(HTML))
			{
				return HTML;
			}

			// Patterns are applied in order; each index pairs with Replaces[i].
			string[] Regexs ={
						@"<script[^>]*?>.*?</script>",
						@"<(\/\s*)?!?((\w+:)?\w+)(\w+(\s*=?\s*(([""'])(\\[""'tbnr]|[^\7])*?\7|\w+)|.{0})|\s)*?(\/\s*)?>",
						@"([\r\n])[\s]+",
						@"&(quot|#34);",
						@"&(amp|#38);",
						@"&(lt|#60);",
						@"&(gt|#62);",
						@"&(nbsp|#160);",
						@"&(iexcl|#161);",
						@"&(cent|#162);",
						@"&(pound|#163);",
						@"&(copy|#169);",
						@"&#(\d+);",
						@"-->",
						@"<!--.*\n"
			};

			string[] Replaces ={
							"",
							"",
							"",
							"\"",
							"&",
							"<",
							">",
							" ",
							"\xa1", //chr(161),
							"\xa2", //chr(162),
							"\xa3", //chr(163),
							"\xa9", //chr(169),
							"",
							"\r\n",
							""
			};

			string s = HTML;
			for (int i = 0; i < Regexs.Length; i++)
			{
				s = new Regex(Regexs[i], RegexOptions.Multiline | RegexOptions.IgnoreCase).Replace(s, Replaces[i]);
			}

			// Fix: String.Replace returns a new string; the original discarded
			// these three results, so the replacements never took effect.
			s = s.Replace("<", "");
			s = s.Replace(">", "");
			s = s.Replace("\r\n", "");
			return s;
		}

		/// <summary>
		/// Enqueues a URL for crawling unless it has already been seen.
		/// URLs matching a file config with IsLoadLink (link-hub pages) may be
		/// re-crawled, so only the wait set is consulted for those; other URLs
		/// are also skipped when already crawled. Exposed to Lua as "pushLink".
		/// </summary>
		public void PushLinkToWaitQueue(int jobId, string url)
		{
			CrawlJobInfo job = Configs.GetCrawlJobById(jobId);

			// First file config whose URL pattern matches this URL, if any.
			CrawlJobFileInfo matched = null;
			foreach (CrawlJobFileInfo candidate in job.Files)
			{
				if (Regex.IsMatch(url, candidate.UrlPattern))
				{
					matched = candidate;
					break;
				}
			}

			int hashCode = url.GetHashCode();
			bool alreadyWaiting = setWait.IsMember(hashCode);
			bool shouldEnqueue;
			if (matched != null && matched.IsLoadLink)
			{
				shouldEnqueue = !alreadyWaiting;
			}
			else
			{
				shouldEnqueue = !alreadyWaiting && !setCrawled.IsMember(hashCode);
			}

			// 检查增加到待抓取队列
			if (shouldEnqueue)
			{
				LinkInfo newLink = new LinkInfo();
				newLink.Url = url;
				newLink.JobId = jobId;
				queueWait.Push(newLink);
				setWait.Add(hashCode);
			}
		}

		/// <summary>
		/// Queues a fetched page for background processing by the Analyze worker.
		/// </summary>
		public void PushPageData(PageDataInfo data)
		{
			queueAnalyze.Push(data);
		}

		/// <summary>
		/// Forces a URL to be crawled again: removes it from the crawled set,
		/// then re-enqueues it on the wait queue.
		/// </summary>
		public void ReCrawl(string url)
		{
			int hash = url.GetHashCode();
			setCrawled.Remove(hash);
			this.PushLinkToWaitQueue(this.jobId, url);
		}

		/// <summary>
		/// Dequeues the next link to crawl, recording a crawl_task row
		/// (status 1 = in progress) for it. Returns null when the queue is empty.
		/// </summary>
		public LinkInfo PopLinkFromWaitQueue()
		{
			LinkInfo link = queueWait.Pop();
			if (link == null)
			{
				return null;
			}

			Global.DBAccess.ExecuteNonQuery("insert into crawl_task(hash,job_id,url,status,start_time) values(@hash,@job_id,@url,@status,sysdate())",
				new MySqlParameter("hash", link.Url.GetHashCode()),
				new MySqlParameter("job_id", link.JobId),
				new MySqlParameter("url", link.Url),
				new MySqlParameter("status", 1)
			);
			return link;
		}

		/// <summary>
		/// Upserts the raw HTML of a crawled page into crawl_data_page,
		/// keyed by the hash code of its URL.
		/// </summary>
		private void SavePageData(PageDataInfo data)
		{
			int hash = data.Url.GetHashCode();
			int updated = Global.DBAccess.ExecuteNonQuery("update crawl_data_page set data=@data,modified=sysdate() where hash=@hash",
				new MySqlParameter("hash", hash),
				new MySqlParameter("data", data.Html)
			);
			if (updated > 0)
			{
				return;
			}

			// No existing row for this URL: insert a fresh record.
			Global.DBAccess.ExecuteNonQuery("insert into crawl_data_page(hash,url,cpu_time,data,created,modified) values(@hash,@url,@cpu_time,@data,@created,@modified)",
				new MySqlParameter("hash", hash),
				new MySqlParameter("url", data.Url),
				new MySqlParameter("cpu_time", data.CpuTime),
				new MySqlParameter("data", data.Html),
				new MySqlParameter("created", DateTime.Now),
				new MySqlParameter("modified", DateTime.Now)
			);
		}

		/// <summary>
		/// Deletes the stored raw page for a URL (rows are keyed by URL hash code).
		/// Exposed to Lua as "delPageData".
		/// </summary>
		public void DelPageData(string url)
		{
			int hash = url.GetHashCode();
			Global.DBAccess.ExecuteNonQuery("delete from crawl_data_page where hash=@hash", new MySqlParameter("hash", hash));
		}

		/// <summary>
		/// Removes a URL from the crawled set so it may be crawled again.
		/// Exposed to Lua as "delCrawledUrl".
		/// </summary>
		public void DelCrawledUrl(string url)
		{
			int hash = url.GetHashCode();
			setCrawled.Remove(hash);
		}

		/// <summary>
		/// Creates a Lua interpreter with this class's helper methods registered
		/// under the script-facing names Lua code may call.
		/// Registration order matches the original hand-written list.
		/// </summary>
		private Lua GetLua()
		{
			Lua lua = new Lua();
			// {lua name, C# method name} pairs, resolved by reflection.
			string[][] functions = new string[][]
			{
				new[] { "split", "Split" },
				new[] { "filterString", "FilterString" },
				new[] { "match", "Match" },
				new[] { "fetchMatch", "FetchMatch" },
				new[] { "pushLink", "PushLinkToWaitQueue" },
				new[] { "fetchWebPage", "FetchWebPage" },
				new[] { "matchByXPath", "MatchByXPath" },
				new[] { "clearHtmlTags", "ClearHtmlTags" },
				new[] { "replaceString", "ReplaceString" },
				new[] { "matchPropertyByXPath", "MatchPropertyByXPath" },
				new[] { "delPageData", "DelPageData" },
				new[] { "delCrawledUrl", "DelCrawledUrl" }
			};
			foreach (string[] pair in functions)
			{
				lua.RegisterFunction(pair[0], this, this.GetType().GetMethod(pair[1]));
			}
			return lua;
		}

		/// <summary>
		/// Background worker loop: pops crawled pages off the analyze queue,
		/// extracts out-links, persists the raw page, runs the per-file Lua hook,
		/// extracts configured fields into XML, and stores the feed.
		/// Runs until <see cref="isRun"/> is cleared by Stop().
		/// </summary>
		private void Analyze()
		{
			while (this.isRun)
			{
				// Fix: declared per-iteration. The original declared `data` outside
				// the loop, so an exception thrown by Pop() left the PREVIOUS item
				// in `data` and the catch block re-queued it, duplicating work.
				PageDataInfo data = null;
				string xml = null;
				CrawlJobInfo job = null;
				try
				{
					data = queueAnalyze.Pop();
					if (data != null)
					{
						if (string.IsNullOrEmpty(data.Html))
						{
							// Empty fetch: push the URL back to the crawl queue to retry.
							LinkInfo link = new LinkInfo();
							link.JobId = data.JobId;
							link.Url = data.Url;
							queueWait.Push(link);
							continue; // note: skips the throttle sleep, as before
						}

						job = Configs.GetCrawlJobById(data.JobId);
						if (job == null)
						{
							continue; // job config removed; drop the page
						}

						ExtractPageLink(data, job.Files);
						this.UpdateTaskStatus(data.Url.GetHashCode(), data.JobId, 2); // 2 = crawled
						this.SavePageData(data);

						Stopwatch sw = Stopwatch.StartNew();

						// `job` is guaranteed non-null here (redundant check removed).
						if (job.Files != null)
						{
							CrawlJobFileInfo fileInfo = this.MathFile(job.Files, data.Url);
							if (fileInfo != null)
							{
								// Run the per-file Lua "doData" hook, when defined.
								if (!string.IsNullOrEmpty(fileInfo.Lua))
								{
									using (Lua lua = GetLua())
									{
										lua.DoString(fileInfo.Lua);
										LuaFunction fun = lua.GetFunction("doData");
										if (fun != null)
										{
											fun.Call(fileInfo.JobId, fileInfo.FileId, data.Url, data.Html, job.Encoding);
											fun.Dispose();
										}
									}
								}

								CrawlJobFileFieldInfo[] fields = Configs.GetCrawlFileFieldsByFileId(fileInfo.FileId);
								if (fields != null)
								{
									xml = BuildXmlData(fileInfo.FileId, data.Url, job, fields, data.Html);
									if (!string.IsNullOrEmpty(xml))
									{
										sw.Stop();
										this.InsertFeed(job.JobId, fileInfo.FileId, data.Url, xml, sw.Elapsed.TotalMilliseconds);
									}
								}
							}
						}
					}
				}
				catch (Exception ex)
				{
					EventLogs.Error(ex.ToString() + " URL:" + (data != null ? data.Url : ""));

					// Re-queue the failed item at the tail for a later retry.
					if (data != null)
					{
						queueAnalyze.RPush(data);
					}
				}

				// Throttle: idle poll every 3s; short pause between processed items.
				Thread.Sleep(data == null ? 3000 : 100);
			}
		}

		/// <summary>
		/// Scans the page HTML for &lt;a href&gt; links, resolves relative links
		/// against the page URL, and enqueues every link that matches one of the
		/// job's file URL patterns and has not been seen before.
		/// </summary>
		private void ExtractPageLink(PageDataInfo data, CrawlJobFileInfo[] jobFiles)
		{
			if (jobFiles == null || jobFiles.Length == 0)
			{
				return;
			}

			// Combined "any file pattern matches" alternation: ^p1$|^p2$|...
			string urlRule = string.Join("|",
				jobFiles.Select(f => string.Format("^{0}$", f.UrlPattern)).ToArray());

			Regex regex = new Regex("<a(.+?)href\\s*=\\s*(\"(?<key>[^\"]*)\"|(?<key>\\s+))(.+?)</a>", RegexOptions.IgnoreCase);
			MatchCollection matchs = regex.Matches(data.Html);
			if (matchs.Count == 0)
			{
				return;
			}

			// Loop-invariant: base URI of the page; the original rebuilt it per match.
			Uri baseUri = new Uri(data.Url);
			// Fix: the original hard-coded "http://" even when the page was served over https.
			string schemePrefix = baseUri.Scheme + "://";

			foreach (Match match in matchs)
			{
				string href = match.Groups["key"].Value;
				if (href.StartsWith("#") || href.StartsWith("javascript", StringComparison.OrdinalIgnoreCase))
				{
					continue; // in-page anchors and script pseudo-links
				}

				string absolute = href;
				// Fix: also treat https:// links as already absolute; the original
				// only checked http:// and mangled https links as if relative.
				if (!absolute.StartsWith("http://", StringComparison.OrdinalIgnoreCase) &&
					!absolute.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
				{
					if (absolute.StartsWith("/"))
					{
						// Root-relative link.
						absolute = schemePrefix + baseUri.Host + absolute;
					}
					else if (baseUri.Segments.Length == 1)
					{
						// Page lives at the site root.
						absolute = schemePrefix + baseUri.Host + "/" + absolute;
					}
					else
					{
						// Resolve against the page's directory (all segments but the last).
						string directory = string.Concat(baseUri.Segments.Take(baseUri.Segments.Length - 1).ToArray());
						absolute = schemePrefix + baseUri.Host + directory + absolute;
					}
				}

				if (!Regex.IsMatch(absolute, urlRule))
				{
					continue;
				}

				// First file config whose pattern matches, to honor its IsLoadLink flag.
				CrawlJobFileInfo matchFile = null;
				foreach (CrawlJobFileInfo f in jobFiles)
				{
					if (Regex.IsMatch(absolute, f.UrlPattern))
					{
						matchFile = f;
						break;
					}
				}

				int hashCode = absolute.GetHashCode();
				bool isLoadLink;
				if (matchFile != null && matchFile.IsLoadLink)
				{
					// Link-hub pages may be re-crawled: only skip if already waiting.
					isLoadLink = !setWait.IsMember(hashCode);
				}
				else
				{
					isLoadLink = !setWait.IsMember(hashCode) && !setCrawled.IsMember(hashCode);
				}

				//检查增加到待抓取队列
				if (isLoadLink)
				{
					LinkInfo newLink = new LinkInfo();
					newLink.Url = absolute;
					newLink.JobId = data.JobId;
					queueWait.Push(newLink);
					setWait.Add(hashCode);
				}
			}
		}

		/// <summary>
		/// Seeds the wait queue with the job's configured entry URLs and starts
		/// the Analyze background thread (once; subsequent calls only re-seed).
		/// </summary>
		public void Start()
		{
			// Fix: guard against a null/empty Urls config — the original threw a
			// NullReferenceException on Split. Also removed the dead `urls != null`
			// check (string.Split never returns null).
			string urlList = this.JobInfo.Urls;
			if (!string.IsNullOrEmpty(urlList))
			{
				foreach (string url in urlList.Split(','))
				{
					this.PushLinkToWaitQueue(this.jobId, url);
				}
			}

			// NOTE(review): lock(this) is discouraged (external code could lock the
			// same instance); a private readonly gate field would be safer, but that
			// requires a new field outside this method.
			lock (this)
			{
				if (!this.isRun)
				{
					this.analyzeThread = new Thread(this.Analyze) { IsBackground = true };
					this.analyzeThread.Start();
					this.isRun = true;
				}
			}
		}

		/// <summary>
		/// Signals the Analyze loop to stop and waits up to 5 seconds for the
		/// worker thread to exit, aborting it as a last resort.
		/// </summary>
		public void Stop()
		{
			this.isRun = false;

			// Fix: the original dereferenced analyzeThread unconditionally and
			// threw a NullReferenceException when Stop() was called before Start().
			Thread worker = this.analyzeThread;
			if (worker == null)
			{
				return;
			}

			if (!worker.Join(5000))
			{
				try
				{
					// Last resort; Abort may throw (e.g. unsupported on newer runtimes).
					worker.Abort();
				}
				catch
				{
				}
			}
		}

		/// <summary>
		/// Returns the first file config whose URL pattern fully matches the URL,
		/// or null when none matches. (Name kept as-is; callers use "MathFile".)
		/// </summary>
		private CrawlJobFileInfo MathFile(CrawlJobFileInfo[] fileInfos, string url)
		{
			foreach (CrawlJobFileInfo candidate in fileInfos)
			{
				string pattern = string.Format("^{0}$", candidate.UrlPattern);
				if (Regex.IsMatch(url, pattern))
				{
					return candidate;
				}
			}
			return null;
		}

		/// <summary>
		/// Upserts the extracted XML feed for a (url, fileId) pair into
		/// crawl_data_feed. Rows are keyed by the hash of "url$fileId".
		/// </summary>
		private void InsertFeed(int jobId, int fileId, string url, string xml, double cpuTime)
		{
			int hash = (url + "$" + fileId).GetHashCode();
			int updated = Global.DBAccess.ExecuteNonQuery("update crawl_data_feed set xml=@xml,modified=sysdate(),cpu_time=@cpu_time where hash=@hash",
				new MySqlParameter("hash", hash),
				new MySqlParameter("cpu_time", cpuTime),
				new MySqlParameter("xml", xml)
			);
			if (updated > 0)
			{
				return;
			}

			// No existing feed row: insert one.
			Global.DBAccess.ExecuteNonQuery("insert into crawl_data_feed(hash,job_id,file_id,url,xml,created,modified,cpu_time) values(@hash,@job_id,@file_id,@url,@xml,sysdate(),sysdate(),@cpu_time)",
				new MySqlParameter("hash", hash),
				new MySqlParameter("cpu_time", cpuTime),
				new MySqlParameter("job_id", jobId),
				new MySqlParameter("file_id", fileId),
				new MySqlParameter("url", url),
				new MySqlParameter("xml", xml)
			);
		}

		/// <summary>
		/// Sets the status column of the crawl_task row identified by URL hash
		/// and job id (e.g. 2 = crawled; see Analyze).
		/// </summary>
		private void UpdateTaskStatus(int hash, int jobId, int status)
		{
			// Fix: the original stored the affected-row count in an unused local.
			Global.DBAccess.ExecuteNonQuery("update crawl_task set status=@status where hash=@hash and job_id=@job_id",
				new MySqlParameter("status", status),
				new MySqlParameter("hash", hash),
				new MySqlParameter("job_id", jobId)
			);
		}

		/// <summary>
		/// Convenience overload: loads the file/job config and the cached page
		/// content for a URL, then delegates to the full BuildXmlData.
		/// Returns null for an unknown file id; on failure the exception text
		/// is returned in place of the XML.
		/// </summary>
		public string BuildXmlData(int fileId, string url)
		{
			CrawlJobFileInfo fileInfo = Configs.GetCrawlFileById(fileId);
			if (fileInfo == null)
			{
				return null;
			}

			CrawlJobInfo jobInfo = Configs.GetCrawlJobById(fileInfo.JobId);
			try
			{
				string html = Configs.GetPageData(url, jobInfo.Encoding);
				CrawlJobFileFieldInfo[] fields = Configs.GetCrawlFileFieldsByFileId(fileId);
				return BuildXmlData(fileId, url, jobInfo, fields, html);
			}
			catch (Exception ex)
			{
				return ex.ToString();
			}
		}

		/// <summary>
		/// Builds the &lt;root&gt; XML document for one crawled page: one child
		/// fragment per configured field (see BuildXmlNodes).
		/// </summary>
		private string BuildXmlData(int fileId, string url, CrawlJobInfo jobInfo, CrawlJobFileFieldInfo[] fields, string html)
		{
			XmlDocument doc = FromHtml(html);
			XPathNavigator nav = doc.CreateNavigator();

			StringBuilder sb = new StringBuilder();
			// Fix: escape the URL before embedding it in the attribute — query
			// strings containing '&' or quotes produced malformed XML before.
			sb.AppendFormat("<root fileid=\"{0}\" url='{1}'>", fileId, System.Security.SecurityElement.Escape(url));
			foreach (CrawlJobFileFieldInfo f in fields)
			{
				sb.Append(BuildXmlNodes(jobInfo, f, nav, url, html));
			}
			sb.Append("</root>");
			return sb.ToString();
		}

		/// <summary>
		/// Parses (possibly malformed) HTML into an XmlDocument via SgmlReader.
		/// The XHTML namespace declaration is stripped first so XPath queries
		/// need no namespace prefix.
		/// </summary>
		private XmlDocument FromHtml(string html)
		{
			html = html.Replace("xmlns=\"http://www.w3.org/1999/xhtml\"", "");

			// setup SGMLReader
			Sgml.SgmlReader sgmlReader = new Sgml.SgmlReader
			{
				DocType = "HTML",
				WhitespaceHandling = WhitespaceHandling.All,
				CaseFolding = Sgml.CaseFolding.ToLower,
				InputStream = new StringReader(html)
			};

			// create document
			XmlDocument doc = new XmlDocument();
			doc.PreserveWhitespace = true;
			doc.XmlResolver = null; // no external DTD/entity resolution
			doc.Load(sgmlReader);
			return doc;
		}

		/// <summary>
		/// Downloads the page at <paramref name="url"/> and returns the regex
		/// group-1 captures (see <see cref="Match"/>), or null when nothing matches.
		/// Exposed to Lua as "fetchMatch".
		/// </summary>
		public string[] FetchMatch(string pattern, string url, string encoding)
		{
			return Match(pattern, FetchWebPage(url, encoding));
		}

		/// <summary>
		/// Downloads the page at <paramref name="url"/> and returns the XPath
		/// matches (see <see cref="MatchByXPath"/>), or null when nothing matches.
		/// </summary>
		public string[] FetchMatchByXPath(string xpath, string url, string encoding)
		{
			return MatchByXPath(xpath, FetchWebPage(url, encoding));
		}

		/// <summary>
		/// Fetches the page content for a URL using the given character encoding
		/// (delegates to Configs.GetPageData). Exposed to Lua as "fetchWebPage".
		/// </summary>
		public string FetchWebPage(string url, string encoding)
		{
			return Configs.GetPageData(url, encoding);
		}

		/// <summary>
		/// Runs a case-insensitive regex over the input and returns group 1 of
		/// every match, cleaned by ReplaceSpecialString. Returns null when there
		/// is no match. Exposed to Lua as "match".
		/// </summary>
		/// <param name="pattern">Regex whose first capture group holds the wanted text.</param>
		/// <param name="input">Text to search (typically page HTML).</param>
		public string[] Match(string pattern, string input)
		{
			// Fix: removed an unused StringBuilder the original allocated.
			// NOTE(review): RegexOptions.Compiled pays compilation cost on every
			// call since the pattern varies; left as-is to preserve behavior.
			Regex regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
			MatchCollection matchs = regex.Matches(input);

			if (matchs.Count == 0)
			{
				return null;
			}

			string[] arr = new string[matchs.Count];
			int i = 0;
			foreach (Match match in matchs)
			{
				arr[i] = ReplaceSpecialString(match.Result("$1"));
				i++;
			}
			return arr;
		}

		/// <summary>
		/// Selects nodes in the HTML by XPath and collects the values of the
		/// named attributes (comma-separated list) from every matched node.
		/// Returns null when nothing matches or no attribute names were given;
		/// empty attribute values are skipped. Exposed to Lua as "matchPropertyByXPath".
		/// </summary>
		public string[] MatchPropertyByXPath(string xpath, string propNames, string input)
		{
			XmlDocument doc = FromHtml(input);
			XPathNodeIterator nodes = doc.CreateNavigator().Select(xpath);
			if (nodes.Count == 0)
			{
				return null;
			}

			if (string.IsNullOrEmpty(propNames))
			{
				return null;
			}

			string[] names = propNames.Split(',');
			List<string> values = new List<string>();
			while (nodes.MoveNext())
			{
				foreach (string name in names)
				{
					string attr = nodes.Current.GetAttribute(name, string.Empty);
					if (!string.IsNullOrEmpty(attr))
					{
						values.Add(attr);
					}
				}
			}
			return values.ToArray();
		}

		/// <summary>
		/// Selects nodes in the HTML by XPath and returns their text values.
		/// For &lt;img&gt; nodes the src attribute is returned instead. Child
		/// elements are deleted so only the node's direct text remains; when that
		/// leaves nothing, the full pre-strip value is used. Returns null when
		/// nothing matches. Exposed to Lua as "matchByXPath".
		/// </summary>
		public string[] MatchByXPath(string xpath, string input)
		{
			XmlDocument doc = FromHtml(input);
			XPathNavigator nav = doc.CreateNavigator();
			XPathNodeIterator nodes = nav.Select(xpath);
			if (nodes.Count == 0)
			{
				return null;
			}

			int i = 0;
			string oldValue = null;
			string value = null;
			XPathNodeIterator childNodes = null;
			string[] arr = new string[nodes.Count];
			while (nodes.MoveNext())
			{
				// Value including child-element text, captured before children are removed.
				oldValue = ReplaceSpecialString(nodes.Current.Value);
				if (nodes.Current.Name.ToLower() == "img")
				{
					// Images have no text content; use their src attribute instead.
					oldValue = nodes.Current.GetAttribute("src", string.Empty);
				}

				// Delete child elements so Current.Value yields only direct text.
				// NOTE(review): this mutates the document while iterating its nodes;
				// the order of operations here is deliberate — do not reorder.
				childNodes = nodes.Current.Select("./*");
				while (childNodes.MoveNext())
				{
					childNodes.Current.DeleteSelf();
				}

				value = ReplaceSpecialString(nodes.Current.Value);
				if (string.IsNullOrEmpty(value))
				{
					// No direct text: fall back to the original full value.
					value = oldValue;
				}

				arr[i] = value;
				i++;
			}
			return arr;
		}

		/// <summary>
		/// Splits a string on any of the characters in <paramref name="c"/> and
		/// trims whitespace from every part. Exposed to Lua as "split".
		/// </summary>
		public string[] Split(string str, string c)
		{
			return str
				.Split(c.ToCharArray())
				.Select(part => part.Trim())
				.ToArray();
		}

		/// <summary>
		/// Removes every character of <paramref name="filterStrings"/> from
		/// <paramref name="str"/>. Exposed to Lua as "filterString".
		/// </summary>
		public string FilterString(string str, string filterStrings)
		{
			foreach (char ch in filterStrings)
			{
				str = str.Replace(ch.ToString(), "");
			}
			return str;
		}

		/// <summary>
		/// Replaces all occurrences of <paramref name="str1"/> in
		/// <paramref name="str"/> with <paramref name="str2"/>.
		/// Exposed to Lua as "replaceString".
		/// </summary>
		public string ReplaceString(string str, string str1, string str2)
		{
			return str.Replace(str1, str2);
		}

		/// <summary>
		/// Trims the string and strips tabs, carriage returns, newlines and
		/// literal "&amp;nbsp;" entity text from extracted values.
		/// </summary>
		private string ReplaceSpecialString(string str)
		{
			str = str.Trim();
			// Fix: removed the redundant .ToString() the original called on each
			// string element while replacing.
			string[] unwanted = new string[] { "\t", "\n", "\r", "&nbsp;" };
			foreach (string token in unwanted)
			{
				str = str.Replace(token, "");
			}
			return str;
		}

		/// <summary>
		/// Builds the XML fragment(s) for one configured field of a crawl file.
		/// When the field has an XPath, one element per matched node is emitted
		/// as CDATA; a field-level Lua "doData" hook, when present, may transform
		/// or replace the output. Without an XPath the field is produced entirely
		/// by Lua (or emitted empty). Any error is rethrown wrapped with the
		/// field's name/code so the bad config can be located.
		/// </summary>
		private string BuildXmlNodes(CrawlJobInfo jobInfo, CrawlJobFileFieldInfo f, XPathNavigator nav, string url, string html)
		{
			Lua lua = null;
			LuaFunction fun = null;
			string value = null;
			string oldValue = null;
			XPathNodeIterator childNodes = null;
			try
			{
				// Compile the field's Lua script (if any) and fetch its doData entry point.
				if (!string.IsNullOrEmpty(f.Lua))
				{
					lua = GetLua();
					lua.DoString(f.Lua);
					fun = lua.GetFunction("doData");
				}

				StringBuilder sb = new StringBuilder();
				if (!string.IsNullOrEmpty(f.XPath))
				{
					XPathNodeIterator nodes = nav.Select(f.XPath);
					if (nodes.Count == 0)
					{
						// XPath matched nothing: give the Lua hook a chance to build
						// the node from the raw HTML instead.
						if (!string.IsNullOrEmpty(f.Lua))
						{
							if (fun != null)
							{

								object[] objRet = fun.Call(f.FieldId, f.Code, html, jobInfo.Encoding);
								if (objRet != null && objRet[0] != null)
								{
									return objRet[0].ToString();
								}
								else
								{
									// Lua returned nothing: emit an empty element for the field.
									return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
								}
							}
						}
					}

					while (nodes.MoveNext())
					{
						// Full value (including child-element text), taken before stripping.
						oldValue = ReplaceSpecialString(nodes.Current.Value);
						if (nodes.Current.Name.ToLower() == "img")
						{
							// Images carry their content in the src attribute.
							oldValue = nodes.Current.GetAttribute("src", string.Empty);
						}

						// Delete child elements so only the node's direct text remains.
						// NOTE(review): mutates the shared navigator's document; fields
						// evaluated later against the same nav see the stripped tree.
						childNodes = nodes.Current.Select("./*");
						while (childNodes.MoveNext())
						{
							childNodes.Current.DeleteSelf();
						}

						value = ReplaceSpecialString(nodes.Current.Value);
						if (string.IsNullOrEmpty(value))
						{
							// No direct text left: fall back to the pre-strip value.
							value = oldValue;
						}

						if (fun != null)
						{
							// Let Lua transform the extracted value; fall back to CDATA output.
							object[] objRet = fun.Call(f.FieldId, f.Code, url, html, jobInfo.Encoding, value, nodes.Current.InnerXml);
							if (objRet != null && objRet[0] != null)
							{
								sb.Append(objRet[0].ToString());
							}
							else
							{
								sb.AppendFormat("<{0} id=\"{1}\"><![CDATA[{2}]]></{0}>", f.Code, f.FieldId, value);
							}
						}
						else
						{
							sb.AppendFormat("<{0} id=\"{1}\"><![CDATA[{2}]]></{0}>", f.Code, f.FieldId, value);
						}
					}
				}
				else
				{
					// No XPath configured: the field is produced entirely by Lua (or left empty).
					if (fun != null)
					{
						object[] objRet = fun.Call(f.FieldId, f.Code, url, html, jobInfo.Encoding);
						if (objRet != null && objRet[0] != null)
						{
							return objRet[0].ToString();
						}
						else
						{
							return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
						}
					}
					else
					{
						return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
					}
				}
				return sb.ToString();
			}
			catch (Exception ex)
			{
				// Wrap with the field name/code so operators can locate the bad config.
				throw new Exception("\"" + f.Name + "(" + f.Code + ")\"节点发生异常,信息：" + ex.ToString());
			}
			finally
			{
				// Release Lua resources regardless of outcome.
				if (fun != null)
				{
					fun.Dispose();
				}

				if (lua != null)
				{
					lua.Dispose();
				}

				lua = null;
				fun = null;
				value = null;
				oldValue = null;
				childNodes = null;
			}
		}

	}
}
