﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using TeamDev.Redis;
using ServiceStack.Redis;
using Common;
using System.Net;
using System.Text.RegularExpressions;
using System.Diagnostics;
using MySql.Data.MySqlClient;
using ServiceStack.Redis.Support;
using Sgml;
using System.IO;
using System.Xml.XPath;
using System.Xml;
using System.Data;
using LuaInterface;
using System.Web;

namespace SpiderServer
{
	/// <summary>
	/// Registry of all running worker threads so they can be stopped together.
	/// </summary>
	class Threads
	{
		private static List<ThreadBase> s_threads;
		// Guards s_threads: Remove() is invoked from ThreadBase.Stop(), which can
		// run on a different thread than Run()/Stop().
		private static readonly object s_gate = new object();

		static Threads()
		{
			s_threads = new List<ThreadBase>();
		}

		/// <summary>Unregisters a worker (called by ThreadBase.Stop).</summary>
		public static void Remove(ThreadBase t)
		{
			lock (s_gate)
			{
				s_threads.Remove(t);
			}
		}

		/// <summary>Registers a worker. The thread itself is already started by its constructor.</summary>
		public static void Run(ThreadBase t)
		{
			lock (s_gate)
			{
				s_threads.Add(t);
			}
		}

		/// <summary>
		/// Stops every registered worker. Iterates a snapshot because
		/// ThreadBase.Stop() calls Remove(), which would otherwise mutate the
		/// list while it is being enumerated and throw InvalidOperationException.
		/// </summary>
		public static void Stop()
		{
			ThreadBase[] snapshot;
			lock (s_gate)
			{
				snapshot = s_threads.ToArray();
			}
			foreach (ThreadBase t in snapshot)
			{
				t.Stop();
			}
		}
	}

	/// <summary>
	/// Base class for background worker threads. The constructor configures and
	/// starts the thread immediately; subclasses override Run() and poll isStop.
	/// </summary>
	abstract class ThreadBase
	{
		private Thread thread;
		// FIX: volatile — written by Stop() on one thread and polled inside the
		// worker's Run() loop on another; without it the worker may never
		// observe the flag change.
		protected volatile bool isStop;

		public ThreadBase()
		{
			isStop = false;
			thread = new Thread(new ThreadStart(this.Run));
			thread.IsBackground = true;
			OnCreateThread(thread);
			// NOTE(review): the thread starts here, before any derived-class
			// constructor body has run — Run() implementations must tolerate
			// fields assigned in derived constructors still being unset briefly.
			thread.Start();
		}

		/// <summary>Hook for subclasses to adjust the thread before it starts.</summary>
		protected virtual void OnCreateThread(Thread thread)
		{

		}

		/// <summary>Worker body; subclasses loop here until isStop becomes true.</summary>
		protected virtual void Run()
		{

		}

		/// <summary>
		/// Signals the worker to stop, unregisters it, waits up to 2s for a clean
		/// exit, then forcibly aborts it. Exceptions are swallowed deliberately:
		/// shutdown is best-effort (Join/Abort can throw if the thread already
		/// finished or was never startable).
		/// </summary>
		public void Stop()
		{
			isStop = true;
			try
			{
				Threads.Remove(this);
				if (!thread.Join(2000))
				{
					thread.Abort();
				}
			}
			catch
			{
			}
		}
	}

	class SpiderThread : ThreadBase
	{
		// Redis-backed work structures shared with AnalyzeThread; key names are
		// suffixed with the job id so each crawl job is isolated.
		MessageQueue<LinkInfo> queueWait;
		MessageQueue<LinkInfo> queueAnalyze;
		MessageSet<int> setWait;
		MessageSet<int> setCrawled;
		private bool isRun = false;   // NOTE(review): never read in this class — confirm before removing
		private int jobId = 0;

		/// <summary>Binds this spider to one crawl job's queues and sets.</summary>
		public SpiderThread(int jobId)
		{
			this.jobId = jobId;

			string suffix = jobId.ToString();
			queueWait = new MessageQueue<LinkInfo>("list_link_wait" + suffix);
			queueAnalyze = new MessageQueue<LinkInfo>("list_link_analyze" + suffix);
			setWait = new MessageSet<int>("set_link_wait" + suffix);
			setCrawled = new MessageSet<int>("set_link_crawled" + suffix);
		}

		protected override void Run()
		{
			// Worker loop: pop a waiting link, fetch its page, mark it crawled,
			// hand it to the analyze queue, and harvest new links from the HTML.
			string strHtml = null;
			LinkInfo link = null;
			CrawlJobInfo curJob = null;
			CrawlJobFileInfo file = null;
			while (!isStop)
			{
				try
				{
					curJob = Configs.GetCrawlJobById(this.jobId);
					link = queueWait.Pop();
					if (link != null)
					{
						CrawlJobInfo job = Configs.GetCrawlJobById(link.JobId);
						if (job != null)
						{
							// Resolve which configured "file" (URL pattern) this link
							// belongs to; fileId stays 0 when no pattern matches.
							int fileId = 0;
							file = GetFileInfo(link.Url, job.Files);
							if (file != null)
							{
								fileId = file.FileId;
							}

							strHtml = Configs.GetPageData(link.Url, fileId, job.Encoding);
							if (!string.IsNullOrEmpty(strHtml))
							{
								// URL identity is its string hash code — NOTE(review):
								// hash collisions would make distinct URLs look crawled.
								setCrawled.Add(link.Url.GetHashCode());
								queueAnalyze.Push(link);
								this.ExtractPageLink(link, strHtml, job.Files);
							}
							setWait.Remove(link.Url.GetHashCode());
						}
					}
				}
				catch (Exception ex)
				{
					EventLogs.Error(ex.ToString() + " URL:" + (link != null ? link.Url : ""));
					// Re-queue the failed link so it is retried later.
					if (link != null)
					{
						queueWait.RPush(link);
						setWait.Add(link.Url.GetHashCode());
					}
				}

				// Throttle: long back-off when the queue was empty, otherwise the
				// per-job crawl speed (or a 1s default when the job is unknown).
				if (link == null)
				{
					Thread.Sleep(3000);
				}
				else
				{
					if (curJob != null)
					{
						Thread.Sleep(curJob.Speed);
					}
					else
					{
						Thread.Sleep(1000);
					}
				}

				strHtml = null;
				link = null;
				curJob = null;
			}
		}

		/// <summary>
		/// Downloads a page over HTTP with browser-like headers.
		/// Returns the decoded body, or null on any failure or non-200 status.
		/// </summary>
		/// <param name="url">Absolute URL to fetch (also sent as the Referer).</param>
		/// <param name="encoding">Text encoding name used to decode the body.</param>
		/// <param name="cookie">Optional raw Cookie header value; ignored when null/empty.</param>
		private string GetWeb(string url, string encoding, string cookie)
		{
			string html = null;
			HttpWebResponse response = null;
			StreamReader stream = null;
			try
			{
				HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(url);
				webRequest.UserAgent = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.83 Safari/537.1";
				webRequest.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
				webRequest.Referer = url;
				webRequest.Headers.Add("Cache-Control", "max-age=0");
				webRequest.Headers.Add("Accept-Language", "zh-CN,zh;q=0.8");
				webRequest.Headers.Add("Accept-Encoding", "gzip,deflate,sdch");
				webRequest.Headers.Add("Accept-Charset", "GBK,utf-8;q=0.7,*;q=0.3");
				webRequest.Credentials = CredentialCache.DefaultCredentials;
				webRequest.KeepAlive = true;
				// BUG FIX: the original condition was inverted — it added the Cookie
				// header only when the cookie was null/empty, which both dropped
				// real cookies and threw ArgumentNullException for null values.
				if (!string.IsNullOrEmpty(cookie))
				{
					webRequest.Headers.Add("Cookie", cookie);
				}

				webRequest.ReadWriteTimeout = 30 * 1000;
				response = (HttpWebResponse)webRequest.GetResponse();
				if (response.StatusCode == HttpStatusCode.OK)
				{
					stream = new StreamReader(response.GetResponseStream(), Encoding.GetEncoding(encoding));
					html = stream.ReadToEnd();
				}
			}
			catch (Exception)
			{
				// Best-effort fetch: any network/decoding failure yields null.
				return null;
			}
			finally
			{
				// Close the reader before the response so the underlying network
				// stream is released in order.
				if (stream != null)
					stream.Close();

				if (response != null)
					response.Close();
			}
			return html;
		}

		/// <summary>
		/// Returns the first job file whose UrlPattern regex matches the URL,
		/// or null when none matches.
		/// </summary>
		private CrawlJobFileInfo GetFileInfo(string url, CrawlJobFileInfo[] jobFiles)
		{
			foreach (CrawlJobFileInfo candidate in jobFiles)
			{
				if (Regex.IsMatch(url, candidate.UrlPattern))
				{
					return candidate;
				}
			}
			return null;
		}

		/// <summary>
		/// Scans page HTML for anchor tags, resolves relative hrefs against the
		/// current link's URL, and queues every URL that matches one of the job's
		/// file patterns and has not been seen yet.
		/// </summary>
		private void ExtractPageLink(LinkInfo link, string strHtml, CrawlJobFileInfo[] jobFiles)
		{
			if (jobFiles == null)
			{
				return;
			}

			// Combine every file pattern into one anchored alternation
			// (^p1$|^p2$|...) used as a quick pre-filter below.
			string urlRule = "";
			int j = 0;
			foreach (CrawlJobFileInfo fri in jobFiles)
			{
				string strTmp = string.Format("^{0}$", fri.UrlPattern);
				if (j == 0)
				{
					urlRule = strTmp;
				}
				else
				{
					urlRule += "|" + strTmp;
				}
				j++;
			}

			if (urlRule == "") return;

			// Capture the href of each <a ...>...</a>; the "key" group holds the URL.
			Regex regex = new Regex("<a(.+?)href\\s*=\\s*(\"(?<key>[^\"]*)\"|(?<key>\\s+))(.+?)</a>", RegexOptions.IgnoreCase);
			MatchCollection matchs = regex.Matches(strHtml);
			if (matchs.Count > 0)
			{
				List<CrawlJobFileInfo> arrFiles = new List<CrawlJobFileInfo>();
				foreach (Match match in matchs)
				{
					string href = match.Groups["key"].Value;
					// Skip in-page anchors and javascript: pseudo-links.
					if (href.ToLower().StartsWith("#") || href.ToLower().StartsWith("javascript")) continue;

					// Resolve relative URLs against the page being crawled.
					// NOTE(review): only "http://" is recognized as absolute — an
					// https:// href would be mangled by the joins below; confirm
					// crawled sites are http-only.
					string strTmp = href;
					Uri uri = new Uri(link.Url);
					if (!strTmp.ToLower().StartsWith("http://"))
					{
						if (!strTmp.StartsWith("/"))
						{
							if (uri.Segments.Length == 1)
							{
								// Page is at the site root: join directly under the host.
								strTmp = "http://" + uri.Host + "/" + strTmp;
							}
							else
							{
								// Join relative to the current page's directory
								// (all segments except the last).
								string segments = "";
								for (int i = 0; i < uri.Segments.Length - 1; i++)
								{
									segments += uri.Segments[i];
								}
								strTmp = "http://" + uri.Host + segments + strTmp;
							}
						}
						else
						{
							// Host-absolute path.
							strTmp = "http://" + uri.Host + strTmp;
						}
					}

					if (Regex.IsMatch(strTmp, urlRule))
					{
						// Find which file pattern matched so IsLoadLink can be honored.
						CrawlJobFileInfo matchFile = null;
						foreach (CrawlJobFileInfo f in jobFiles)
						{
							if(Regex.IsMatch(strTmp, f.UrlPattern))
							{
								matchFile = f;
								break;
							}
						}

						var isLoadLink = false;
						var hashCode = strTmp.GetHashCode();
						if (matchFile != null && matchFile.IsLoadLink)
						{
							// "Load link" pages may be re-crawled; only the wait set blocks them.
							isLoadLink = !setWait.IsMember(hashCode);
						}
						else
						{
							isLoadLink = !setWait.IsMember(hashCode) && !setCrawled.IsMember(hashCode);
						}

						// Add to the wait-to-crawl queue when the checks above allow it.
						if (isLoadLink)
						{
							LinkInfo newLink = new LinkInfo();
							newLink.Url = strTmp;
							newLink.JobId = link.JobId;
							queueWait.Push(newLink);
							setWait.Add(hashCode);
						}
					}
				}
			}
		}
	}

	class AnalyzeThread : ThreadBase
	{
		// Redis-backed queues/sets shared with SpiderThread; key names are
		// suffixed with the job id so each job has its own structures.
		MessageQueue<LinkInfo> queueWait;
		MessageQueue<LinkInfo> queueAnalyze;
		MessageSet<int> setWait;
		MessageSet<int> setCrawled;
		int jobId;

		/// <summary>Binds this analyzer to one crawl job's queues and sets.</summary>
		public AnalyzeThread(int jobId)
		{
			this.jobId = jobId;
			string suffix = jobId.ToString();
			queueWait = new MessageQueue<LinkInfo>("list_link_wait" + suffix);
			queueAnalyze = new MessageQueue<LinkInfo>("list_link_analyze" + suffix);
			setWait = new MessageSet<int>("set_link_wait" + suffix);
			setCrawled = new MessageSet<int>("set_link_crawled" + suffix);
		}

		protected override void Run()
		{
			// Worker loop: pop a crawled link, run its file's Lua hook (if any),
			// build the per-field XML, and persist the result as a feed row.
			LinkInfo link = null;
			string html = null;
			string xml = null;
			CrawlJobInfo job = null;
			while (!isStop)
			{
				try
				{
					link = queueAnalyze.Pop();
					if (link != null)
					{
						job = Configs.GetCrawlJobById(link.JobId);
						if (job == null)
						{
							// Unknown job: drop the link. Note this "continue" also
							// skips the throttle sleep and resets at the loop bottom.
							continue;
						}
						// Measure end-to-end analysis time for the cpu_time column.
						Stopwatch sw = new Stopwatch();
						sw.Start();

						html = Configs.GetPageData(link.Url, job.Encoding);
						if (html != null)
						{
							if (job != null && job.Files != null)
							{
								CrawlJobFileInfo fileInfo = this.MathFile(job.Files, link.Url);
								if (fileInfo != null)
								{
									// Run the file-level Lua script, exposing the helper
									// methods below; the script may define doData(...).
									if (!string.IsNullOrEmpty(fileInfo.Lua))
									{
										using (Lua lua = new Lua())
										{
											lua.RegisterFunction("split", this, this.GetType().GetMethod("Split"));
											lua.RegisterFunction("filterString", this, this.GetType().GetMethod("FilterString"));
											lua.RegisterFunction("match", this, this.GetType().GetMethod("Match"));
											lua.RegisterFunction("fetchMatch", this, this.GetType().GetMethod("FetchMatch"));
											lua.RegisterFunction("pushLink", this, this.GetType().GetMethod("PushLinkToWaitQueue"));
											lua.RegisterFunction("fetchWebPage", this, this.GetType().GetMethod("FetchWebPage"));
											lua.RegisterFunction("matchByXPath", this, this.GetType().GetMethod("MatchByXPath"));
											lua.RegisterFunction("clearHtmlTags", this, this.GetType().GetMethod("ClearHtmlTags"));
											lua.RegisterFunction("replaceString", this, this.GetType().GetMethod("ReplaceString"));
											lua.RegisterFunction("matchPropertyByXPath", this, this.GetType().GetMethod("MatchPropertyByXPath"));
											lua.RegisterFunction("delPageData", this, this.GetType().GetMethod("DelPageData"));
											lua.RegisterFunction("delCrawledUrl", this, this.GetType().GetMethod("DelCrawledUrl"));
											
											lua.DoString(fileInfo.Lua);
											LuaFunction fun = lua.GetFunction("doData");
											if (fun != null)
											{
												fun.Call(fileInfo.JobId, fileInfo.FileId, link.Url, html, job.Encoding);
												fun.Dispose();
											}
										}
									}

									// Extract the configured fields and persist the XML.
									CrawlJobFileFieldInfo[] fields = Configs.GetCrawlFileFieldsByFileId(fileInfo.FileId);
									if (fields != null)
									{
										xml = BuildXmlData(fileInfo.FileId, link.Url, job, fields, html);
										if (!string.IsNullOrEmpty(xml))
										{
											sw.Stop();
											this.InsertFeed(job.JobId, fileInfo.FileId, link.Url, xml, sw.Elapsed.TotalMilliseconds);
										}
									}
								}
							}
						}
					}
				}
				catch (Exception ex)
				{
					EventLogs.Error(ex.ToString() + " URL:"+ (link != null ? link.Url: ""));

					// Re-queue so the link is analyzed again later.
					if (link != null)
					{
						queueAnalyze.RPush(link);
					}
				}

				// Long back-off when the queue was empty; small pacing otherwise.
				if (link == null)
				{
					Thread.Sleep(3000);
				}
				else
				{
					Thread.Sleep(100);
				}

				link = null;
				html = null;
				xml = null;
				job = null;
			}
		}


		/// <summary>Deletes the cached page row for a URL (exposed to Lua as "delPageData").</summary>
		public void DelPageData(string url)
		{
			int hash = url.GetHashCode();
			Global.DBAccess.ExecuteNonQuery("delete from crawl_data_page where hash=@hash", new MySqlParameter("hash", hash));
		}

		/// <summary>Removes a URL from the crawled set so it can be fetched again (Lua: "delCrawledUrl").</summary>
		public void DelCrawledUrl(string url)
		{
			int hash = url.GetHashCode();
			setCrawled.Remove(hash);
		}

		/// <summary>
		/// Queues a URL for crawling (exposed to Lua as "pushLink"). The link is
		/// skipped when already waiting; for files not marked IsLoadLink it is
		/// also skipped when already crawled.
		/// </summary>
		public void PushLinkToWaitQueue(int jobId, string url)
		{
			CrawlJobInfo job = Configs.GetCrawlJobById(jobId);
			// Guard added: the original dereferenced job.Files unconditionally and
			// threw NullReferenceException for unknown job ids coming from Lua
			// scripts (other callers in this file do check for a null job).
			if (job == null || job.Files == null)
			{
				return;
			}

			CrawlJobFileInfo matchFile = null;
			foreach (CrawlJobFileInfo f in job.Files)
			{
				if (Regex.IsMatch(url, f.UrlPattern))
				{
					matchFile = f;
					break;
				}
			}

			var isLoadLink = false;
			var hashCode = url.GetHashCode();
			if (matchFile != null && matchFile.IsLoadLink)
			{
				// "Load link" pages may be re-crawled; only the wait set blocks them.
				isLoadLink = !setWait.IsMember(hashCode);
			}
			else
			{
				isLoadLink = !setWait.IsMember(hashCode) && !setCrawled.IsMember(hashCode);
			}

			// Add to the wait-to-crawl queue when the checks above allow it.
			if (isLoadLink)
			{
				LinkInfo newLink = new LinkInfo();
				newLink.Url = url;
				newLink.JobId = jobId;
				queueWait.Push(newLink);
				setWait.Add(hashCode);
			}
		}

		/// <summary>
		/// Upserts an extracted-feed row keyed by hash(url + "$" + fileId):
		/// UPDATE first, INSERT when no existing row was affected.
		/// </summary>
		private void InsertFeed(int jobId, int fileId, string url, string xml, double cpuTime)
		{
			int hash = (url + "$" + fileId).GetHashCode();
			const string updateSql = "update crawl_data_feed set xml=@xml,modified=sysdate(),cpu_time=@cpu_time where hash=@hash";
			int affected = Global.DBAccess.ExecuteNonQuery(updateSql,
				new MySqlParameter("hash", hash),
				new MySqlParameter("cpu_time", cpuTime),
				new MySqlParameter("xml", xml)
			);

			if (affected == 0)
			{
				const string insertSql = "insert into crawl_data_feed(hash,job_id,file_id,url,xml,created,modified,cpu_time) values(@hash,@job_id,@file_id,@url,@xml,sysdate(),sysdate(),@cpu_time)";
				Global.DBAccess.ExecuteNonQuery(insertSql,
					new MySqlParameter("hash", hash),
					new MySqlParameter("cpu_time", cpuTime),
					new MySqlParameter("job_id", jobId),
					new MySqlParameter("file_id", fileId),
					new MySqlParameter("url", url),
					new MySqlParameter("xml", xml)
				);
			}
		}

		/// <summary>Fetches a page and applies Match() to it (exposed to Lua as "fetchMatch").</summary>
		public string[] FetchMatch(string pattern, string url, string encoding)
		{
			string page = FetchWebPage(url, encoding);
			return Match(pattern, page);
		}

		/// <summary>Fetches a page and applies MatchByXPath() to it (Lua: "fetchMatchByXPath").</summary>
		public string[] FetchMatchByXPath(string xpath, string url, string encoding)
		{
			string page = FetchWebPage(url, encoding);
			return MatchByXPath(xpath, page);
		}

		/// <summary>Downloads a page via the shared page-data helper (Lua: "fetchWebPage").</summary>
		public string FetchWebPage(string url, string encoding)
		{
			string page = Configs.GetPageData(url, encoding);
			return page;
		}

		/// <summary>
		/// Runs a regex over the input and returns capture group 1 ($1) of every
		/// match, cleaned of whitespace/entity noise; null when nothing matches.
		/// Exposed to Lua as "match" — the pattern must contain a capture group.
		/// </summary>
		public string[] Match(string pattern, string input)
		{
			// (Removed an unused StringBuilder local present in the original.)
			Regex regex = new Regex(pattern, RegexOptions.Compiled | RegexOptions.IgnoreCase);
			MatchCollection matchs = regex.Matches(input);

			if (matchs.Count == 0)
			{
				return null;
			}

			string[] arr = new string[matchs.Count];
			int i = 0;
			foreach (Match match in matchs)
			{
				arr[i] = ReplaceSpecialString(match.Result("$1"));
				i++;
			}
			return arr;
		}

		/// <summary>
		/// Selects nodes by XPath and collects the values of the named attributes
		/// (comma-separated list) from each node. Returns null when the XPath
		/// matches nothing or no attribute names were given.
		/// Exposed to Lua as "matchPropertyByXPath".
		/// </summary>
		public string[] MatchPropertyByXPath(string xpath, string propNames, string input)
		{
			XPathNavigator navigator = FromHtml(input).CreateNavigator();
			XPathNodeIterator iterator = navigator.Select(xpath);
			if (iterator.Count == 0 || string.IsNullOrEmpty(propNames))
			{
				return null;
			}

			string[] attributeNames = propNames.Split(',');
			List<string> values = new List<string>();
			while (iterator.MoveNext())
			{
				foreach (string attributeName in attributeNames)
				{
					string attributeValue = iterator.Current.GetAttribute(attributeName, string.Empty);
					if (!string.IsNullOrEmpty(attributeValue))
					{
						values.Add(attributeValue);
					}
				}
			}
			return values.ToArray();
		}

		/// <summary>
		/// Selects nodes by XPath and returns each node's own (direct) text,
		/// cleaned of whitespace noise; null when nothing matches.
		/// Exposed to Lua as "matchByXPath".
		/// </summary>
		public string[] MatchByXPath(string xpath, string input)
		{
			XmlDocument doc = FromHtml(input);
			XPathNavigator nav = doc.CreateNavigator();
			XPathNodeIterator nodes = nav.Select(xpath);
			if (nodes.Count == 0)
			{
				return null;
			}

			int i = 0;
			string oldValue = null;
			string value = null;
			XPathNodeIterator childNodes = null;
			string[] arr = new string[nodes.Count];
			while (nodes.MoveNext())
			{
				// Full text including descendants, kept as a fallback value.
				oldValue = ReplaceSpecialString(nodes.Current.Value);
				if (nodes.Current.Name.ToLower() == "img")
				{
					// For images, take the src attribute instead of text content.
					oldValue = nodes.Current.GetAttribute("src", string.Empty);
				}

				// Delete all child elements so Current.Value below yields only the
				// node's direct text content.
				childNodes = nodes.Current.Select("./*");
				while (childNodes.MoveNext())
				{
					childNodes.Current.DeleteSelf();
				}

				value = ReplaceSpecialString(nodes.Current.Value);
				if (string.IsNullOrEmpty(value))
				{
					value = oldValue;
				}
				
				arr[i] = value;
				i++;
			}
			return arr;
		}

		/// <summary>
		/// Splits a string and trims each piece (exposed to Lua as "split").
		/// NOTE(review): every character of <paramref name="c"/> is a separate
		/// delimiter — it is not treated as one multi-character separator;
		/// confirm the Lua scripts expect this.
		/// </summary>
		public string[] Split(string str, string c)
		{
			return str.Split(c.ToArray()).Select(piece => piece.Trim()).ToArray();
		}

		/// <summary>
		/// Removes characters from str (exposed to Lua as "filterString").
		/// NOTE(review): despite the plural name, each individual character of
		/// filterStrings is removed — not whole substrings; confirm callers.
		/// </summary>
		public string FilterString(string str, string filterStrings)
		{
			foreach (char ch in filterStrings)
			{
				str = str.Replace(ch.ToString(), "");
			}
			return str;
		}

		/// <summary>Replaces every occurrence of str1 in str with str2 (Lua: "replaceString").</summary>
		public string ReplaceString(string str, string str1, string str2)
		{
			string replaced = str.Replace(str1, str2);
			return replaced;
		}

		/// <summary>
		/// Trims the input and strips tabs, newlines, carriage returns and
		/// literal "&amp;nbsp;" entities; used to normalize extracted node text.
		/// </summary>
		private string ReplaceSpecialString(string str)
		{
			str = str.Trim();
			// foreach over the tokens; the original indexed the array and called
			// a redundant ToString() on each already-string element.
			foreach (string token in new string[] { "\t", "\n", "\r", "&nbsp;" })
			{
				str = str.Replace(token, "");
			}
			return str;
		}

		/// <summary>
		/// Strips scripts, tags, comments and common HTML entities from a chunk
		/// of HTML, returning plain text (exposed to Lua as "clearHtmlTags").
		/// </summary>
		public string ClearHtmlTags(string HTML)
		{
			// Patterns and their replacements are parallel arrays (index-matched).
			string[] Regexs ={
						@"<script[^>]*?>.*?</script>",
						@"<(\/\s*)?!?((\w+:)?\w+)(\w+(\s*=?\s*(([""'])(\\[""'tbnr]|[^\7])*?\7|\w+)|.{0})|\s)*?(\/\s*)?>",
						@"([\r\n])[\s]+",
						@"&(quot|#34);",
						@"&(amp|#38);",
						@"&(lt|#60);",
						@"&(gt|#62);",
						@"&(nbsp|#160);",
						@"&(iexcl|#161);",
						@"&(cent|#162);",
						@"&(pound|#163);",
						@"&(copy|#169);",
						@"&#(\d+);",
						@"-->",
						@"<!--.*\n"
			};

			string[] Replaces ={
							"",
							"",
							"",
							"\"",
							"&",
							"<",
							">",
							" ",
							"\xa1", //chr(161),
							"\xa2", //chr(162),
							"\xa3", //chr(163),
							"\xa9", //chr(169),
							"",
							"\r\n",
							""
			};

			string s = HTML;
			for (int i = 0; i < Regexs.Length; i++)
			{
				s = new Regex(Regexs[i], RegexOptions.Multiline | RegexOptions.IgnoreCase).Replace(s, Replaces[i]);
			}
			// BUG FIX: the original called s.Replace(...) and discarded the result
			// (strings are immutable), so these final cleanups never took effect.
			s = s.Replace("<", "");
			s = s.Replace(">", "");
			s = s.Replace("\r\n", "");
			return s;
		}

		/// <summary>
		/// Re-fetches a URL and rebuilds its field XML from the current config.
		/// Returns null for an unknown file id. On error it returns the exception
		/// text instead of XML — apparently a diagnostic aid; NOTE(review):
		/// callers must not assume the result is well-formed XML.
		/// </summary>
		public string BuildXmlData(int fileId, string url)
		{
			CrawlJobFileInfo fileInfo = Configs.GetCrawlFileById(fileId);
			if(fileInfo == null)
			{
				return null;
			}

			CrawlJobInfo jobInfo = Configs.GetCrawlJobById(fileInfo.JobId);
			try
			{
				string strHtml = Configs.GetPageData(url, jobInfo.Encoding);
				return BuildXmlData(fileId, url, jobInfo, Configs.GetCrawlFileFieldsByFileId(fileId), strHtml);
			}
			catch(Exception ex)
			{
				return ex.ToString();
			}
		}

		/// <summary>
		/// Renders all configured fields for one page into a &lt;root&gt; fragment.
		/// NOTE(review): url is embedded into the attribute unescaped — a URL
		/// containing an apostrophe yields invalid XML; confirm upstream URLs.
		/// </summary>
		private string BuildXmlData(int fileId, string url, CrawlJobInfo jobInfo, CrawlJobFileFieldInfo[] fields, string html)
		{
			XPathNavigator navigator = FromHtml(html).CreateNavigator();

			StringBuilder xml = new StringBuilder();
			xml.AppendFormat("<root fileid=\"{0}\" url='{1}'>", fileId, url);
			foreach (CrawlJobFileFieldInfo field in fields)
			{
				xml.Append(BuildXmlNodes(jobInfo, field, navigator, url, html));
			}
			xml.Append("</root>");
			return xml.ToString();
		}

		/// <summary>
		/// Parses (possibly malformed) HTML into an XmlDocument via SgmlReader,
		/// first stripping the XHTML default namespace so XPath queries need no prefix.
		/// </summary>
		private XmlDocument FromHtml(string html)
		{
			string cleaned = html.Replace("xmlns=\"http://www.w3.org/1999/xhtml\"", "");

			// Configure SgmlReader to read the input as lower-cased HTML.
			Sgml.SgmlReader reader = new Sgml.SgmlReader();
			reader.DocType = "HTML";
			reader.WhitespaceHandling = WhitespaceHandling.All;
			reader.CaseFolding = Sgml.CaseFolding.ToLower;
			reader.InputStream = new StringReader(cleaned);

			XmlDocument document = new XmlDocument();
			document.PreserveWhitespace = true;
			document.XmlResolver = null; // no external DTD/entity resolution
			document.Load(reader);
			return document;
		}

		/// <summary>
		/// Renders one configured field into an XML fragment. With an XPath, each
		/// matching node becomes a &lt;code id="fieldId"&gt;CDATA&lt;/code&gt; element,
		/// optionally post-processed by the field's Lua doData hook; with no
		/// XPath, the Lua hook alone produces the fragment. Throws a wrapped
		/// exception naming the field on any failure.
		/// </summary>
		private string BuildXmlNodes(CrawlJobInfo jobInfo, CrawlJobFileFieldInfo f, XPathNavigator nav, string url, string html)
		{
			Lua lua = null;
			LuaFunction fun = null;
			string value = null;
			string oldValue = null;
			XPathNodeIterator childNodes = null;
			try
			{
				// Compile the field's Lua script (if any), exposing the helper
				// methods of this class, and pick up its doData entry point.
				if (!string.IsNullOrEmpty(f.Lua))
				{
					lua = new Lua();
					lua.RegisterFunction("split", this, this.GetType().GetMethod("Split"));
					lua.RegisterFunction("filterString", this, this.GetType().GetMethod("FilterString"));
					lua.RegisterFunction("match", this, this.GetType().GetMethod("Match"));
					lua.RegisterFunction("fetchMatch", this, this.GetType().GetMethod("FetchMatch"));
					lua.RegisterFunction("fetchMatchByXPath", this, this.GetType().GetMethod("FetchMatchByXPath"));
					lua.RegisterFunction("pushLink", this, this.GetType().GetMethod("PushLinkToWaitQueue"));
					lua.RegisterFunction("matchByXPath", this, this.GetType().GetMethod("MatchByXPath"));
					lua.RegisterFunction("fetchWebPage", this, this.GetType().GetMethod("FetchWebPage"));
					lua.RegisterFunction("clearHtmlTags", this, this.GetType().GetMethod("ClearHtmlTags"));
					lua.RegisterFunction("replaceString", this, this.GetType().GetMethod("ReplaceString"));
					lua.RegisterFunction("matchPropertyByXPath", this, this.GetType().GetMethod("MatchPropertyByXPath"));
					lua.RegisterFunction("delPageData", this, this.GetType().GetMethod("DelPageData"));
					lua.RegisterFunction("delCrawledUrl", this, this.GetType().GetMethod("DelCrawledUrl"));

					
					lua.DoString(f.Lua);
					fun = lua.GetFunction("doData");
				}

				StringBuilder sb = new StringBuilder();
				if (!string.IsNullOrEmpty(f.XPath))
				{
					XPathNodeIterator nodes = nav.Select(f.XPath);
					if (nodes.Count == 0)
					{
						// No XPath match: give the Lua hook a chance to synthesize the
						// value from the raw HTML (note the shorter argument list here).
						if (!string.IsNullOrEmpty(f.Lua))
						{
							if (fun != null)
							{

								object[] objRet = fun.Call(f.FieldId, f.Code, html, jobInfo.Encoding);
								if (objRet != null && objRet[0] != null)
								{
									return objRet[0].ToString();
								}
								else
								{
									return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
								}
							}
						}
					}
				
					while (nodes.MoveNext())
					{
						// Full text (descendants included) as a fallback value; for
						// <img> nodes use the src attribute instead.
						oldValue = ReplaceSpecialString(nodes.Current.Value);
						if (nodes.Current.Name.ToLower() == "img")
						{
							oldValue = nodes.Current.GetAttribute("src", string.Empty);
						}

						// Delete child elements so Current.Value yields direct text only.
						childNodes = nodes.Current.Select("./*");
						while (childNodes.MoveNext())
						{
							childNodes.Current.DeleteSelf();
						}

						value = ReplaceSpecialString(nodes.Current.Value);
						if (string.IsNullOrEmpty(value))
						{
							value = oldValue;
						}

						if (fun != null)
						{
							// Let Lua transform the value; fall back to plain CDATA output.
							object[] objRet = fun.Call(f.FieldId, f.Code, url, html, jobInfo.Encoding, value, nodes.Current.InnerXml);
							if (objRet != null && objRet[0] != null)
							{
								sb.Append(objRet[0].ToString());
							}
							else
							{
								sb.AppendFormat("<{0} id=\"{1}\"><![CDATA[{2}]]></{0}>", f.Code, f.FieldId, value);
							}
						}
						else
						{
							sb.AppendFormat("<{0} id=\"{1}\"><![CDATA[{2}]]></{0}>", f.Code, f.FieldId, value);
						}
					}
				}
				else
				{
					// No XPath configured: the Lua hook (if any) is the sole producer.
					if (fun != null)
					{
						object[] objRet = fun.Call(f.FieldId, f.Code, url, html, jobInfo.Encoding);
						if (objRet != null && objRet[0] != null)
						{
							return objRet[0].ToString();
						}
						else
						{
							return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
						}
					}
					else
					{
						return string.Format("<{0} id=\"{1}\"></{0}>", f.Code, f.FieldId);
					}
				}
				return sb.ToString();
			}
			catch (Exception ex)
			{
				// Wrap with the field name/code so the log pinpoints the bad field.
				throw new Exception("\"" + f.Name + "(" + f.Code + ")\"节点发生异常,信息：" + ex.ToString());
			}
			finally
			{
				// Dispose Lua state eagerly and drop references.
				if (fun != null)
				{
					fun.Dispose();
				}

				if (lua != null)
				{
					lua.Dispose();
				}

				

				lua = null;
				fun = null;
				value = null;
				oldValue = null;
				childNodes = null;
			}
		}

		/// <summary>
		/// Returns the first job file whose anchored UrlPattern (^pattern$)
		/// matches the whole URL, or null. (The name is a long-standing typo for
		/// "MatchFile", kept because callers depend on it.)
		/// </summary>
		private CrawlJobFileInfo MathFile(CrawlJobFileInfo[] fileInfos, string url)
		{
			foreach (CrawlJobFileInfo candidate in fileInfos)
			{
				string anchoredPattern = string.Format("^{0}$", candidate.UrlPattern);
				if (Regex.IsMatch(url, anchoredPattern))
				{
					return candidate;
				}
			}
			return null;
		}

		
	}

	class FeedSendThread : ThreadBase
	{
		protected override void Run()
		{
			// Worker loop: claim one pending feed row (status=0), POST its XML to
			// the file's receiver URL, and record the outcome in the row's status.
			string hash = null;
			string xml = null;
			int jobId = 0;
			int fileId = 0;
			CrawlJobInfo jobInfo = null;
			WebClient wc = null;
			while (!isStop)
			{
				try
				{
					this.PopFeed(out xml, out hash,out jobId, out fileId);
					if (hash != null)
					{
						using (wc = new WebClient())
						{
							wc.Encoding = System.Text.Encoding.UTF8;
							jobInfo = Configs.GetCrawlJobById(jobId);
							// NOTE(review): throws if the job no longer exists or has
							// no file with this id — handled by the catch below,
							// which marks the row as errored.
							var result = jobInfo.Files.Where<CrawlJobFileInfo>(n => n.FileId == fileId);
							CrawlJobFileInfo fileInfo = result.ToArray()[0];
							if (string.IsNullOrEmpty(fileInfo.FeedRecUrl))
							{
								// No receiver configured: mark done-without-push (status 3).
								Global.DBAccess.ExecuteNonQuery("update crawl_data_feed set status=3,deal_time=sysdate(),push_time=sysdate() where hash=@hash", new MySqlParameter("hash", hash));
							}
							else
							{
								// Push the XML and store the receiver's reply (status 2).
								string strRet = wc.UploadString(fileInfo.FeedRecUrl, xml);
								Global.DBAccess.ExecuteNonQuery("update crawl_data_feed set status=2,deal_time=sysdate(),push_time=sysdate(),err_info=@err_info where hash=@hash", new MySqlParameter("hash", hash), new MySqlParameter("err_info", strRet));
							}
							result = null;
						}
					}
				}
				catch (Exception ex)
				{
					// Record the failure against the claimed row (status 2 + error info).
					if (hash != null)
					{
						Global.DBAccess.ExecuteNonQuery("update crawl_data_feed set status=2,err_num=err_num+1,err_info=@err_info,deal_time=sysdate(),push_time=sysdate() where hash=@hash", new MySqlParameter("hash", hash), new MySqlParameter("err_info", ex.Message));
					}
				}
				// Long idle sleep when nothing was pending; short pacing otherwise.
				if (hash == null)
				{
					Thread.Sleep(10000);
				}
				else
				{
					Thread.Sleep(1000);
				}

				hash = null;
				xml = null;
			}
		}

		/// <summary>
		/// Claims the next unpushed feed row: reads status=0 rows inside a
		/// transaction, flips the first one to status=1, and returns its contents
		/// through the out parameters (all null/0 when nothing is pending).
		/// Errors roll back and are swallowed, leaving the outputs empty.
		/// NOTE(review): the query fetches up to 100 rows but only the first is
		/// used — confirm the limit is intentional.
		/// </summary>
		private void PopFeed(out string xml, out string hash,out int jobId, out int fileId)
		{
			xml = null;
			hash = null;
			jobId = 0;
			fileId = 0;
			DataTable dt = null;
			MySqlTransaction tran = null;
			try
			{
				tran = Global.DBAccess.BeginTransaction();
				dt = Global.DBAccess.GetTable(tran.Connection, "select xml,hash,file_id,job_id from crawl_data_feed where status=0 limit 100");
				if (dt.Rows.Count > 0)
				{
					xml = dt.Rows[0]["xml"].ToString();
					hash = dt.Rows[0]["hash"].ToString();
					jobId = Convert.ToInt32(dt.Rows[0]["job_id"]);
					fileId = Convert.ToInt32(dt.Rows[0]["file_id"]);
					Global.DBAccess.ExecuteNonQuery(tran.Connection, "update crawl_data_feed set status=1,deal_time=sysdate() where hash=@hash", new MySqlParameter("hash", hash));
				}
				tran.Commit();
			}
			catch
			{
				// Swallowed by design: a failed claim simply yields empty outputs
				// and the caller sleeps and retries.
				if (tran != null)
				{
					tran.Rollback();
				}
			}
			finally
			{
				if (tran != null)
				{
					tran.Connection.Close();
				}
			}
		}
	}

	public class HttpURLConnectionWrapper
	{
      
    // Simple per-host cookie manager
    // Shared cookie store singleton, keyed by host.
    CookieManager cookieManager = CookieManager.Instance;
    // NOTE(review): the url argument is never stored or used — confirm callers.
    public HttpURLConnectionWrapper(string url) {
    }
    /// <summary>
    /// Populates a GET request with browser-like headers and any cookies this
    /// wrapper has stored for the target host.
    /// </summary>
    private void FillRequestHeadField(HttpWebRequest httpRequest)
    {
        string storedCookie = cookieManager.GetCookies(httpRequest.RequestUri.Host);
        httpRequest.Method = "GET";
        httpRequest.UserAgent = "Mozilla/5.0 (Windows NT 5.1; rv:5.0) Gecko/20100101 Firefox/5.0";
        httpRequest.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
        httpRequest.Referer = httpRequest.RequestUri.Host;
        httpRequest.Headers.Add("Cache-Control", "max-age=0");
        httpRequest.Headers.Add("Accept-Language", "zh-CN,zh;q=0.8");
        httpRequest.Credentials = CredentialCache.DefaultCredentials;
        if (!string.IsNullOrEmpty(storedCookie))
        {
            httpRequest.Headers.Add("Cookie", storedCookie);
        }
        httpRequest.KeepAlive = true;
        httpRequest.ReadWriteTimeout = 30 * 1000;
    }

	/// <summary>
	/// Fetches a page through the Youdao web-page translation service and
	/// returns the translated HTML with Youdao's helper markup stripped.
	/// Throws when the service reports the URL as an unsupported link type.
	/// NOTE(review): the encoding and cookie parameters (and oldUrl) are unused;
	/// the response is always read as UTF-8 — confirm before removing.
	/// </summary>
	public string GetWebByWY(string url, string encoding, string cookie)
	{
		string oldUrl = url;
		using (WebClient wc = new WebClient())
		{
			url = string.Format("http://fanyi.youdao.com/WebpageTranslate?keyfrom=webfanyi.top&url={0}&type=SP2ZH_CN", System.Web.HttpUtility.UrlEncode(url));
			wc.Encoding = System.Text.Encoding.UTF8;
			string str = wc.DownloadString(url);
			// The service embeds this marker ("unsupported link type") in the
			// body when it cannot translate the target page.
			if(str.IndexOf("为不支持的链接类型") >= 0)
			{
				throw new Exception("为不支持的链接类型");
			}
			else
			{
				// The translated document lives in an iframe; extract its URL.
				Regex regex = new Regex("getElementById\\(\"youdaotranslateIframe\"\\)\\.src=\"(.+?)\";");
				url = regex.Match(str).Result("$1");

				str = wc.DownloadString(url);
				wc.Encoding = System.Text.Encoding.UTF8;
				// Strip Youdao's hidden spans and rewrite its changelink() hrefs
				// back to the original link targets.
				regex = new Regex("<span class=\"youdao-hiddenspan\">.+?</span>");
				str = regex.Replace(str, "");

				regex = new Regex(@"javascript:changelink\('(.+?)','SP2ZH_CN'\);");
				str = regex.Replace(str, "$1");
				return str;
			}
		}
	}

	/// <summary>
	/// Fetches a page through the Microsoft Translator "bv.aspx" proxy chain:
	/// bootstrap page -> bvsandbox host -> signed proxy.ashx URL, carrying the
	/// Set-Cookie of each hop into the next request. Returns the last body
	/// fetched (which may be an intermediate page when a regex fails to match).
	/// Exceptions propagate to the caller.
	/// </summary>
	public string GetWebByBing(string reqUrl)
	{
		string inCookie = null;
		string outCookie = null;
		string url = null;
		string str = null;
		try
		{
			str = GetWeb0("http://www.microsofttranslator.com/bv.aspx?from=lt&to=zh-chs&a=" + HttpUtility.UrlEncode(reqUrl), "utf-8", null, out outCookie);

			// The bootstrap page embeds the sandbox host as \x-escaped JavaScript.
			Regex regex = new Regex(@"BV.InitRoot\('http\\x3a\\x2f\\x2f(.+?)\\x2fbvsandbox.aspx',");
			Match m = regex.Match(str);
			if (m.Success)
			{
				inCookie = outCookie;
				string ip = m.Result("$1");
				string sign = null;
				url = string.Format("http://{0}/bvsandbox.aspx?&dl=zh-CHS&from=lt&to=zh-CHS&a={1}", ip, HttpUtility.UrlEncode(reqUrl));
				str = GetWeb0(url, "utf-8", inCookie, out outCookie);

				// The sandbox page yields the final proxy base URL and a signature.
				regex = new Regex(@"BV.Init\('http:\/\/www.microsofttranslator.com/', '(.+?)', '(.+?)'\);");
				m = regex.Match(str);
				if (m.Success)
				{
					ip = m.Result("$1");
					sign = m.Result("$2");

					inCookie = outCookie;
					url = string.Format("{0}proxy.ashx?h={1}&a={2}", ip, sign, HttpUtility.UrlEncode(reqUrl));
					str = GetWeb0(url, "utf-8", inCookie, out outCookie);
				}
			}
		}
		catch (Exception)
		{
			// BUG FIX: was "throw ex;", which discards the original stack trace.
			throw;
		}
		return str;
	}

	/// <summary>
	/// Core HTTP GET: sends browser-like headers plus an optional Cookie header,
	/// captures the response's Set-Cookie into <paramref name="outCookie"/>, and
	/// returns the decoded body (null unless the status is 200).
	/// Request failures propagate to the caller.
	/// </summary>
	private string GetWeb0(string url, string encoding, string inCookie, out string outCookie)
	{
		outCookie = null;
		string html = null;
		HttpWebResponse response = null;
		StreamReader stream = null;
		try
		{
			HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(url);
			webRequest.UserAgent = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.83 Safari/537.1";
			webRequest.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
			webRequest.Referer = url;
			webRequest.Headers.Add("Cache-Control", "max-age=0");
			webRequest.Credentials = CredentialCache.DefaultCredentials;
			webRequest.KeepAlive = true;
			// BUG FIX: the original condition was inverted — it added the Cookie
			// header only when inCookie was null/empty (which throws
			// ArgumentNullException for null) and skipped real cookies.
			if (!string.IsNullOrEmpty(inCookie))
			{
				webRequest.Headers.Add("Cookie", inCookie);
			}

			webRequest.ReadWriteTimeout = 30 * 1000;
			response = (HttpWebResponse)webRequest.GetResponse();
			if (response.StatusCode == HttpStatusCode.OK)
			{
				outCookie = response.Headers.Get("Set-Cookie");
				stream = new StreamReader(response.GetResponseStream(), Encoding.GetEncoding(encoding));
				html = stream.ReadToEnd();
			}
		}
		// BUG FIX: removed the "catch (Exception ex) { throw ex; }" wrapper,
		// which only reset the stack trace; exceptions now propagate intact.
		finally
		{
			// Close the reader before the response so the underlying network
			// stream is released in order.
			if (stream != null)
				stream.Close();

			if (response != null)
				response.Close();
		}
		return html;
	}

	/// <summary>Cookie-aware GET that delegates to GetWeb0, discarding the response cookie.</summary>
	public string GetWeb(string url, string encoding, string cookie)
	{
		string discardedCookie = string.Empty;
		return GetWeb0(url, encoding, cookie, out discardedCookie);
	}

		/// <summary>
		/// Downloads <paramref name="url"/> using the shared cookie store
		/// (via FillRequestHeadField / ResolveCookies). On a WebException the
		/// cached cookies for the host are dropped and the request is retried
		/// once, best-effort. Returns null on any unrecoverable failure.
		/// </summary>
		/// <param name="url">Absolute URL to request.</param>
		/// <param name="encoding">Name of the text encoding used to decode the body.</param>
		/// <param name="cookie">Unused; kept for caller compatibility.</param>
		/// <returns>The decoded body on HTTP 200; null otherwise.</returns>
		public string GetWeb2(string url, string encoding, string cookie)
		{
			string html = null;
			HttpWebResponse response = null;
			StreamReader stream = null;
			HttpWebRequest webRequest = null;
			try
			{
				webRequest = (HttpWebRequest)WebRequest.Create(url);
				webRequest.Proxy = null; // skip proxy auto-detection (faster first request)
				FillRequestHeadField(webRequest);
				response = (HttpWebResponse)webRequest.GetResponse();
				ResolveCookies(response, webRequest);
				if (response.StatusCode == HttpStatusCode.OK)
				{
					stream = new StreamReader(response.GetResponseStream(), Encoding.GetEncoding(encoding));
					html = stream.ReadToEnd();
				}
			}
			catch (WebException)
			{
				// The stored cookies may be stale/rejected: drop them and retry once.
				CookieManager.Instance.RemoveCookies(webRequest.RequestUri.Host);
				if (response != null)
				{
					response.Close();
					response = null;
				}

				if (stream != null)
				{
					stream.Close();
					stream = null;
				}

				webRequest = (HttpWebRequest)WebRequest.Create(url);
				try
				{
					// BUG FIX: headers must be populated BEFORE GetResponse();
					// the original called FillRequestHeadField after the response
					// had already been fetched, so the retry went out bare.
					FillRequestHeadField(webRequest);
					response = (HttpWebResponse)webRequest.GetResponse();
					if (response.StatusCode == HttpStatusCode.OK)
					{
						stream = new StreamReader(response.GetResponseStream(), Encoding.GetEncoding(encoding));
						html = stream.ReadToEnd();
					}
				}
				catch
				{
					// Best-effort retry: swallow and fall through with html == null.
				}
			}
			catch (Exception)
			{
				return null;
			}
			finally
			{
				if (response != null)
				{
					response.Close();
					response = null;
				}

				if (stream != null)
				{
					stream.Close();
					stream.Dispose();
					stream = null;
				}
			}
			return html;
		}

		/// <summary>
		/// Copies any Set-Cookie header from <paramref name="response"/> into the
		/// shared cookie store, keyed by the request's host.
		/// </summary>
		private void ResolveCookies(HttpWebResponse response, HttpWebRequest request)
		{
			string setCookies = response.Headers.Get("Set-Cookie");
			if (!string.IsNullOrEmpty(setCookies))
			{
				// CONSISTENCY FIX: access the store through the singleton accessor,
				// matching the CookieManager.Instance.RemoveCookies call elsewhere
				// in this class; the bare `cookieManager` name the original used is
				// not a visible member here (CookieManager's own field is private).
				CookieManager.Instance.SetCookies(request.RequestUri.Host, setCookies);
			}
		}
}  

	/// <summary>
	/// Singleton in-memory cookie store, keyed by the top-level domain so that
	/// sub-domains of one site (www.x.com, mail.x.com) share a cookie jar.
	/// NOTE(review): not synchronized — assumed to be used from a single spider
	/// thread; confirm before sharing across threads.
	/// </summary>
	public class CookieManager
	{
		private static CookieManager cookieManager = new CookieManager();

		// topLevelDomain -> (cookieName -> cookieValue)
		private Dictionary<String, Dictionary<String, String>> cookies = new Dictionary<String, Dictionary<String, String>>();

		// Caches domain -> top-level-domain so each lookup splits the name once.
		private Dictionary<String, String> domainToTopLevelDomainMap = new Dictionary<String, String>();

		private CookieManager() { }

		/// <summary>The single shared instance.</summary>
		public static CookieManager Instance
		{
			get
			{
				return cookieManager;
			}
		}

		/// <summary>
		/// Returns the stored cookies for <paramref name="domain"/> formatted as a
		/// "name=value; name=value" Cookie request-header string, or "" when none
		/// are stored.
		/// </summary>
		public String GetCookies(String domain)
		{
			Dictionary<String, String> domainCookies;
			if (!cookies.TryGetValue(GetTopLevelDomain(domain), out domainCookies))
			{
				return "";
			}
			StringBuilder sb = new StringBuilder();
			foreach (KeyValuePair<string, string> kv in domainCookies)
			{
				if (sb.Length > 0)
				{
					sb.Append("; ");
				}
				sb.Append(kv.Key).Append("=").Append(kv.Value);
			}
			return sb.ToString();
		}

		/// <summary>
		/// Parses a ';'-separated "name=value" cookie string and stores (or
		/// overwrites) each cookie under the domain's top-level domain.
		/// </summary>
		/// <param name="domain">Host the cookies belong to.</param>
		/// <param name="cookiesString">Raw cookie text, e.g. a Set-Cookie header value.</param>
		public void SetCookies(String domain, String cookiesString)
		{
			string topDomain = GetTopLevelDomain(domain);
			Dictionary<String, String> domainCookies;
			if (!cookies.TryGetValue(topDomain, out domainCookies))
			{
				domainCookies = new Dictionary<String, String>();
				cookies.Add(topDomain, domainCookies);
			}
			foreach (String cookie in cookiesString.Split(';'))
			{
				// BUG FIX: split only on the FIRST '=' so values that themselves
				// contain '=' (base64 tokens, URLs) survive intact; the original
				// Split('=') kept only the text before the second '='.
				int eq = cookie.IndexOf('=');
				if (eq > 0)
				{
					// Trim the leading space left over from splitting on "; " so
					// stored names round-trip through GetCookies unchanged.
					string name = cookie.Substring(0, eq).Trim();
					if (name.Length > 0)
					{
						domainCookies[name] = cookie.Substring(eq + 1);
					}
				}
			}
		}

		/// <summary>
		/// Removes every cookie stored for the domain's top-level domain.
		/// </summary>
		public void RemoveCookies(String domain)
		{
			cookies.Remove(GetTopLevelDomain(domain));
		}

		/// <summary>
		/// Returns the last two labels of a dotted host name
		/// ("a.b.example.com" -> "example.com"); hosts with fewer than two labels
		/// (e.g. "localhost") are returned unchanged. Results are cached.
		/// Returns null for a null input.
		/// </summary>
		public String GetTopLevelDomain(String domain)
		{
			if (domain == null)
			{
				return null;
			}
			String topDomain;
			if (!domainToTopLevelDomainMap.TryGetValue(domain, out topDomain))
			{
				String[] splits = domain.Split('.');
				// BUG FIX: a single-label host made splits.Length - 2 negative and
				// threw IndexOutOfRangeException in the original.
				if (splits.Length < 2)
				{
					topDomain = domain;
				}
				else
				{
					topDomain = splits[splits.Length - 2] + "." + splits[splits.Length - 1];
				}
				domainToTopLevelDomainMap.Add(domain, topDomain);
			}
			return topDomain;
		}
	}
}
