﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using Clipping.Infrastructure.Interfaces;
using Clipping.Infrastructure.Models;
namespace Clipping.SearchEngines
{
	/// <summary>
	/// Paged news search against a single search-engine site: constructs search
	/// URLs, downloads result pages, parses them into <see cref="News"/> items,
	/// and caches each page so forward/backward navigation does not re-download.
	/// </summary>
	public class SearchEngine : ISearchEngine
	{
		protected IHtmlDownload _htmlDownloader;
		protected ISearchEnginePageParse _parser;
		protected ISearchUrlConstruct _urlConstructor;
		// Page encoding passed to the downloader; null means "let the downloader decide".
		protected Encoding _encoding;
		protected Uri _currentpage;
		// Navigation history: pages before the current one (top = most recent).
		protected Stack<Uri> _prePageUrls = new Stack<Uri>();
		// Known pages after the current one (top = the immediate next page).
		protected Stack<Uri> _nextPageUrls = new Stack<Uri>();
		// Per-URL result cache so revisiting a page is free.
		protected Dictionary<Uri, List<News>> _cache = new Dictionary<Uri, List<News>>();

		/// <summary>
		/// Creates an engine that is not yet bound to a search; call one of the
		/// <c>InitNewSearch</c> overloads before fetching pages.
		/// </summary>
		/// <param name="name">Display name of the search engine.</param>
		/// <param name="urlConstructor">Builds search URLs and resolves relative links.</param>
		/// <param name="htmlDownloader">Fetches raw HTML for a URL.</param>
		/// <param name="parser">Extracts news items and the next-page link from HTML.</param>
		/// <param name="encoding">Optional page encoding; null lets the downloader decide.</param>
		/// <exception cref="ArgumentNullException">
		/// Thrown when <paramref name="urlConstructor"/>, <paramref name="htmlDownloader"/>
		/// or <paramref name="parser"/> is null (the commented-out Code Contracts in the
		/// original expressed the same preconditions).
		/// </exception>
		public SearchEngine(
			string name,
			ISearchUrlConstruct urlConstructor,
			IHtmlDownload htmlDownloader,
			ISearchEnginePageParse parser,
			Encoding encoding = null)
		{
			if (urlConstructor == null)
				throw new ArgumentNullException("urlConstructor");
			if (htmlDownloader == null)
				throw new ArgumentNullException("htmlDownloader");
			if (parser == null)
				throw new ArgumentNullException("parser");

			this.SearchEngineName = name;
			this._htmlDownloader = htmlDownloader;
			this._parser = parser;
			this._urlConstructor = urlConstructor;
			this._encoding = encoding;
		}

		#region Private methods
		/// <summary>
		/// Resets navigation history and the page cache (re-creating any container
		/// a subclass may have nulled out).
		/// </summary>
		protected void clearCache()
		{
			if (this._prePageUrls == null)
				this._prePageUrls = new Stack<Uri>();
			else
				this._prePageUrls.Clear();
			if (this._nextPageUrls == null)
				this._nextPageUrls = new Stack<Uri>();
			else
				this._nextPageUrls.Clear();
			if (this._cache == null)
				this._cache = new Dictionary<Uri, List<News>>();
			else
				this._cache.Clear();
		}
		#endregion

		#region Properties
		/// <summary>True when a next-page URL has been discovered and navigation is possible.</summary>
		public bool CanGetNextPage
		{
			get
			{
				return this.Initialized
					&& this._nextPageUrls != null
					&& this._nextPageUrls.Count > 0;
			}
		}

		/// <summary>True when there is at least one previously-visited page to go back to.</summary>
		public bool CanGetPrePage
		{
			get
			{
				return this.Initialized
					&& this._prePageUrls != null
					&& this._prePageUrls.Count > 0;
			}
		}

		/// <summary>URL of the first result page of the current search.</summary>
		public Uri InitPageUrl
		{
			get;
			private set;
		}

		/// <summary>URL of the page currently being viewed.</summary>
		public Uri CurrentPageUrl
		{
			get { return _currentpage; }
		}

		/// <summary>
		/// 1-based number of the current page (history depth + 1); 0 if the
		/// history stack has been nulled out.
		/// </summary>
		public int CurrentPageNo
		{
			get
			{
				if (this._prePageUrls == null)
					return 0;
				return this._prePageUrls.Count + 1;
			}
		}

		/// <summary>Display name of this search engine.</summary>
		public string SearchEngineName
		{
			get;
			private set;
		}

		/// <summary>True once <c>InitNewSearch</c> has been called.</summary>
		public bool Initialized
		{
			get;
			private set;
		}
		#endregion

		#region Public methods
		/// <summary>
		/// Starts a new search from an explicit absolute URL, clearing all cached
		/// state from any previous search.
		/// </summary>
		/// <param name="url">Absolute URL of the first result page.</param>
		public void InitNewSearch(string url)
		{
			// The commented-out contract required an absolute well-formed URL;
			// new Uri(url) will throw UriFormatException otherwise.
			this.InitPageUrl = new Uri(url);
			this._currentpage = this.InitPageUrl;
			clearCache();
			this.Initialized = true;
		}

		/// <summary>
		/// Starts a new search built from a search descriptor, clearing all cached
		/// state from any previous search.
		/// </summary>
		/// <param name="searchObj">Search parameters handed to the URL constructor.</param>
		public void InitNewSearch(SearchObject searchObj)
		{
			this.InitPageUrl = this._urlConstructor.ConstructUrl(searchObj);
			this._currentpage = this.InitPageUrl;
			clearCache();
			this.Initialized = true;
		}

		/// <summary>
		/// Returns the news items of the current page, downloading and parsing it
		/// on first access and serving from cache afterwards. Discovering a
		/// next-page link pushes it onto the forward stack.
		/// </summary>
		/// <returns>The parsed items, or null when there is no current page or the download failed.</returns>
		/// <exception cref="InvalidOperationException">When no search has been initialized.</exception>
		public List<News> GetCurrentPageNews()
		{
			if (!this.Initialized)
				throw new InvalidOperationException("未初始化，请调用InitNewSearch方法进行初始化");
			List<News> rtn = null;
			if (this._currentpage != null)
			{
				if (this._nextPageUrls == null)
					this._nextPageUrls = new Stack<Uri>();
				if (this._cache == null)
					this._cache = new Dictionary<Uri, List<News>>();
				// Single lookup instead of ContainsKey + indexer.
				if (!this._cache.TryGetValue(this._currentpage, out rtn))
				{
					string html = this._htmlDownloader.GetHtml(this._currentpage, this._encoding);
					if (!string.IsNullOrEmpty(html))
					{
						string nextPageUrl;
						this._parser.Parse(html, out rtn, out nextPageUrl);

						// Cache the parsed page.
						this._cache.Add(this._currentpage, rtn);
						// Record the next-page address, if the parser found one.
						if (!string.IsNullOrEmpty(nextPageUrl))
						{
							Uri next = this._urlConstructor.ConstructAbsoluteUrl(this._currentpage, nextPageUrl);
							if (next != null)
								this._nextPageUrls.Push(next);
						}
					}
				}
			}
			return rtn;
		}

		/// <summary>
		/// Moves forward one page (pushing the current page onto the back stack)
		/// and returns its news items.
		/// </summary>
		/// <returns>The next page's items, or null when there is no next page.</returns>
		/// <exception cref="InvalidOperationException">When no search has been initialized.</exception>
		public List<News> NavigateToNextPage()
		{
			if (!this.Initialized)
				throw new InvalidOperationException("未初始化，请调用InitNewSearch方法进行初始化");
			List<News> rtn = null;
			if (this._currentpage != null && this._nextPageUrls != null && this._nextPageUrls.Count > 0)
			{
				if (this._prePageUrls == null)
					this._prePageUrls = new Stack<Uri>();
				this._prePageUrls.Push(this._currentpage);
				this._currentpage = this._nextPageUrls.Pop();
				rtn = GetCurrentPageNews();
			}
			return rtn;
		}

		/// <summary>
		/// Moves back one page (pushing the current page onto the forward stack)
		/// and returns its news items.
		/// </summary>
		/// <returns>The previous page's items, or null when there is no previous page.</returns>
		/// <exception cref="InvalidOperationException">When no search has been initialized.</exception>
		public List<News> NavigateToPrePage()
		{
			if (!this.Initialized)
				throw new InvalidOperationException("未初始化，请调用InitNewSearch方法进行初始化");
			List<News> rtn = null;
			if (this._currentpage != null && this._prePageUrls != null && this._prePageUrls.Count > 0)
			{
				if (this._nextPageUrls == null)
					this._nextPageUrls = new Stack<Uri>();
				this._nextPageUrls.Push(this._currentpage);
				this._currentpage = this._prePageUrls.Pop();
				rtn = GetCurrentPageNews();
			}
			return rtn;
		}

		/// <summary>
		/// Follows the "same news" link chain of <paramref name="news"/>, parsing
		/// every page and attaching each found item via <c>News.AddSameNews</c>.
		/// Sleeps one second between pages to throttle requests.
		/// NOTE(review): if the site keeps returning a next-page link this loops
		/// until the chain ends — there is no page cap; confirm that is intended.
		/// </summary>
		/// <param name="news">The news item whose related-news pages are crawled.</param>
		/// <exception cref="ArgumentNullException">When <paramref name="news"/> is null.</exception>
		public void GetSameNews(News news)
		{
			if (news == null)
				throw new ArgumentNullException("news");

			Uri address = this._urlConstructor.ConstructAbsoluteUrl(this.CurrentPageUrl, news.SameNewsUrl);
			while (address != null)
			{
				string html = this._htmlDownloader.GetHtml(address, this._encoding);
				// Same guard as GetCurrentPageNews: a failed download must not
				// reach the parser as null/empty HTML.
				if (string.IsNullOrEmpty(html))
					break;
				List<News> sameNews;
				string nextPageUrl;
				this._parser.Parse(html, out sameNews, out nextPageUrl);

				if (sameNews != null)
				{
					foreach (var n in sameNews)
					{
						news.AddSameNews(n);
					}
				}
				address = this._urlConstructor.ConstructAbsoluteUrl(address, nextPageUrl);
				if (address != null)
					Thread.Sleep(1000); // throttle between page fetches
			}
		}
		#endregion
	}
}
