#region Copyright (c) 2003, newtelligence AG. All rights reserved.
/*
// Copyright (c) 2003, newtelligence AG. (http://www.newtelligence.com)
// Original BlogX Source Code: Copyright (c) 2003, Chris Anderson (http://simplegeek.com)
// All rights reserved.
//  
// Redistribution and use in source and binary forms, with or without modification, are permitted 
// provided that the following conditions are met: 
//  
// (1) Redistributions of source code must retain the above copyright notice, this list of 
// conditions and the following disclaimer. 
// (2) Redistributions in binary form must reproduce the above copyright notice, this list of 
// conditions and the following disclaimer in the documentation and/or other materials 
// provided with the distribution. 
// (3) Neither the name of the newtelligence AG nor the names of its contributors may be used 
// to endorse or promote products derived from this software without specific prior 
// written permission.
//      
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS 
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY 
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR 
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER 
// IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 
// OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// -------------------------------------------------------------------------
//
// Original BlogX source code (c) 2003 by Chris Anderson (http://simplegeek.com)
// 
// newtelligence is a registered trademark of newtelligence Aktiengesellschaft.
// 
// For portions of this software, some additional copyright notices may apply 
// which can either be found in the license.txt file included in the source distribution
// or following this notice. 
//
*/
#endregion

using System;
using System.Collections;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Text.RegularExpressions;
using System.Threading;
using System.Web;
using System.Web.Mail;
using System.Xml.Serialization;
using CookComputing.XmlRpc;
using newtelligence.DasBlog.Runtime.Proxies;
using newtelligence.DasBlog.Util;

namespace newtelligence.DasBlog.Runtime
{   
	/// <summary>
	/// Creates and caches one <see cref="IBlogDataService"/> per content
	/// directory so that all callers share the same service instance (and its
	/// background worker threads) for a given location. Thread-safe.
	/// </summary>
	public class BlogDataServiceFactory
	{
		// maps the upper-cased content path to its service; guarded by SyncRoot
		private static Hashtable services = new Hashtable();
        
		/// <summary>
		/// Gets the shared data service for a content directory, creating it
		/// on first use.
		/// </summary>
		/// <param name="contentLocation">The path of the content directory.</param>
		/// <param name="loggingService">The logging service handed to a newly created data service.</param>
		/// <returns>The cached or newly created <see cref="IBlogDataService"/>.</returns>
		public static IBlogDataService GetService(string contentLocation, ILoggingDataService loggingService)
		{
			IBlogDataService service;
			// normalize the key once instead of on every lookup
			string key = contentLocation.ToUpper();

			lock( services.SyncRoot )
			{
				service = services[key] as IBlogDataService;
				if ( service == null )
				{
					service = new BlogDataServiceXml(contentLocation, loggingService);
					services.Add( key, service );
				}
			}
			return service;
		}

		/// <summary>
		/// Removes the cached service for a content directory, if any.
		/// </summary>
		/// <param name="contentLocation">The path of the content directory.</param>
		/// <returns>true when a cached service was removed; false when none existed.</returns>
		public static bool RemoveService(string contentLocation)
		{
			string key = contentLocation.ToUpper();

			lock( services.SyncRoot )
			{
				if (services.ContainsKey(key))
				{
					services.Remove(key);
					return true;
				}
			}

			return false;
		}
	}   

	internal class BlogDataServiceXml : IBlogDataService
	{
		// root directory holding the blog's XML content files
		private string contentBaseDirectory;
		// loads, caches and saves the per-day data files
		private DataManager data;
		// signals the tracking worker thread that trackingQueue has new work
		private AutoResetEvent trackingQueueEvent;
		// queued tracking jobs, drained by trackingHandlerThread
		private Queue trackingQueue;
		// background thread started in the constructor (runs TrackingHandler)
		private Thread trackingHandlerThread;
		// signals the mail worker thread that sendMailInfoQueue has new work
		private AutoResetEvent sendMailInfoQueueEvent;
		// queued outgoing mail jobs, drained by sendMailInfoHandlerThread
		private Queue sendMailInfoQueue;
		// background thread started in the constructor (runs SendMailHandler)
		private Thread sendMailInfoHandlerThread;
		// event/error log sink; checked for null before use throughout
		private ILoggingDataService loggingService;

		/// <summary>
		/// Gets the root directory that holds the blog's XML content files,
		/// as passed to the constructor.
		/// </summary>
		protected string ContentBaseDirectory
		{
			get
			{
				return contentBaseDirectory;
			}
		}

        /// <summary>
        /// The BlogDataServiceXml constructor is entrypoint for the dasBlog Runtime.
        /// Validates the content directory, wires the file resolver into the
        /// DataManager, starts the two background worker threads (tracking and
        /// mail delivery) and pre-warms the entry id cache.
        /// </summary>
        /// <param name="contentLocation">The path of the content directory</param>
        /// <param name="loggingService">The <see cref="ILoggingDataService"/></param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="contentLocation"/> is not an existing directory.</exception>
		internal BlogDataServiceXml(string contentLocation, ILoggingDataService loggingService)
		{
			contentBaseDirectory = contentLocation;
			this.loggingService = loggingService;
			if ( !Directory.Exists(contentBaseDirectory) )
			{
				throw new ArgumentException(
					String.Format("Invalid directory {0}", contentBaseDirectory),
					"contentLocation");
			}
			data = new DataManager();
			// resolve content-relative file names against the content root
			data.Resolver = new ResolveFileCallback(this.GetAbsolutePath);
			// queue + event + background thread for tracking work
			trackingQueue = new Queue();
			trackingQueueEvent = new AutoResetEvent(false);
			trackingHandlerThread = new Thread(new ThreadStart(this.TrackingHandler));
			trackingHandlerThread.IsBackground = true;
			trackingHandlerThread.Start();
			// queue + event + background thread for outgoing mail
			sendMailInfoQueue = new Queue();
			sendMailInfoQueueEvent = new AutoResetEvent(false);
			sendMailInfoHandlerThread = new Thread(new ThreadStart(this.SendMailHandler));
			sendMailInfoHandlerThread.IsBackground = true;
			sendMailInfoHandlerThread.Start();

			//OmarS: now we want to initialize the EntryIdCache so this doesn't happen elsewhere later on
			this.GetEntryIdCache();
		}

        
		/// <summary>
		/// Resolves a content-relative file name against the content root.
		/// </summary>
		/// <param name="file">File name relative to the content directory.</param>
		/// <returns>The absolute path of the file.</returns>
		protected string GetAbsolutePath(string file)
		{
			string absolutePath = Path.Combine(contentBaseDirectory, file);
			return absolutePath;
		}

		// Cached id-to-date lookup restricted to public entries; handed out to
		// non-admin callers while the data ChangeNumber is unchanged.
		private EntryIdCache _onlyEntryIdCacheOfOnlyPublicEntries;
		private object _onlyEntryIdCacheOfOnlyPublicEntriesLock = new object();
        /// <summary>
        /// Builds (or returns the cached) <see cref="EntryIdCache"/> mapping
        /// entry ids and compressed titles to dates. Callers in the "admin"
        /// role always get the freshly built cache; all other callers get a
        /// view filtered down to public entries, which is reused until the
        /// data ChangeNumber moves.
        /// </summary>
        /// <returns>The cache appropriate for the current principal.</returns>
        protected EntryIdCache GetEntryIdCache()
        {
            lock(_onlyEntryIdCacheOfOnlyPublicEntriesLock)
            {
                //			EntryIdCache ecache = new EntryIdCache();
                //			ecache.Ensure(data);
                //			return ecache;
                EntryIdCache ecache = new EntryIdCache();
                ecache.Ensure(data);
                // Overly complicated:  
                // We save (cache) the EntryIdCache and use the saved version
                // if not the admin and the ChangeNumber hasn't changed.
                if(!Thread.CurrentPrincipal.IsInRole("admin"))
                {
                    if(_onlyEntryIdCacheOfOnlyPublicEntries == null ||
                        (_onlyEntryIdCacheOfOnlyPublicEntries.ChangeNumber != ecache.ChangeNumber) )
                    {
                        // rebuild the public-only view: keep public entries only
                        EntryIdCacheEntryCollection entryCacheCollection = new EntryIdCacheEntryCollection();
                        foreach(EntryIdCacheEntry entryIdCacheEntry in ecache.Entries)
                        {
                            if(entryIdCacheEntry.IsPublic)
                            {
                                entryCacheCollection.Add(entryIdCacheEntry);
                            }
                        }
                        ecache.Entries = entryCacheCollection;
                        _onlyEntryIdCacheOfOnlyPublicEntries = ecache;
                    }
                    else
                    {
                        // Since change numbers are the same return the saved (cached) version.
                        ecache = _onlyEntryIdCacheOfOnlyPublicEntries;
                    }
                }
                return ecache;
            }
        }

		/// <summary>
		/// Maps an entry id — or, as a fallback, a compressed title used in
		/// permalinks — to the UTC date of the day file containing the entry.
		/// </summary>
		/// <param name="entryId">Entry id or compressed title.</param>
		/// <returns>The entry's date, or DateTime.MinValue when no match exists.</returns>
		protected DateTime GetDateForEntry( string entryId )
		{
			if (entryId.Length == 0)
			{
				// FIX: HttpContext.Current is null when the runtime is hosted
				// outside ASP.NET (the logging fallback below acknowledges
				// this); guard before touching Trace to avoid an NRE here.
				if (HttpContext.Current != null)
				{
					HttpContext.Current.Trace.Write("entryId is empty!");
				}
			}
			EntryIdCache ecache = GetEntryIdCache();

			// first try the id itself, then fall back to the compressed title
			DateTime foundDate = ecache.GetDateFromEntryId(entryId);	
			if(foundDate == DateTime.MinValue)
			{
				foundDate = ecache.GetDateFromCompressedTitle(entryId);
			}

			if (foundDate == DateTime.MinValue)
			{
				// log the miss with a stack trace; include the request URL when available
				StackTrace st = new StackTrace();
				try
				{
					// if we are running from the runtime, but not asp.net this will fail
					loggingService.AddEvent(new EventDataItem(EventCodes.Error,"GetDateForEntry: Can't find \"" + entryId + "\" " + st.ToString() + " " + HttpContext.Current.Request.RawUrl,String.Empty));
				}
				catch
				{
					loggingService.AddEvent(new EventDataItem(EventCodes.Error,"GetDateForEntry: Can't find \"" + entryId + "\" " + st.ToString(),String.Empty));					
				}
			}

			return foundDate;
		}

		/// <summary>
		/// Returns the loaded DayEntry for the given UTC date. When no day
		/// file exists yet, an empty one is created, saved to disk and the
		/// entry change counter is incremented.
		/// </summary>
		/// <param name="date">UTC date of the requested day.</param>
		/// <returns>The loaded (or newly created) DayEntry.</returns>
		DayEntry InternalGetDayEntry( DateTime date )
		{
			if (data.Days.ContainsKey(date))
			{
				DayEntry existingDay = data.Days[date];
				existingDay.Load(data);
				return existingDay;
			}

			// no day file yet: create, persist and register the change
			DayEntry newDay = new DayEntry();
			newDay.Initialize();
			newDay.DateUtc = date;
			newDay.Save(data);
			data.IncrementEntryChange();
			return newDay;
		}

		/// <summary>
		/// Gets the loaded DayEntry for the given UTC date, creating an empty
		/// day file when none exists yet (see InternalGetDayEntry).
		/// </summary>
		/// <param name="date">UTC date of the requested day.</param>
		/// <returns>The loaded (or newly created) DayEntry.</returns>
		DayEntry IBlogDataService.GetDayEntry( DateTime date )
		{
			return InternalGetDayEntry( date );
		}

		/// <summary>
		/// Returns the DayEntries that satisfy every handler chained into
		/// <paramref name="dayEntryCriteria"/>, up to <paramref name="maxDays"/>
		/// days. A null criteria delegate includes every day.
		/// </summary>
		/// <param name="maxDays">Maximum number of days to collect.</param>
		/// <param name="dayEntryCriteria">Chained predicates over DayEntry; all must accept a day for it to be included.</param>
		/// <returns>A DayEntryCollection with the matching days.</returns>
		protected DayEntryCollection InternalGetDayEntries(int maxDays, 
			DayEntryCollection.CriteriaHandler dayEntryCriteria)
		{
			DayEntryCollection result = new DayEntryCollection();
			DayEntryCollection allDays = data.Days;

			for (int index = 0; index < allDays.Count; index++)
			{
				DayEntry candidate = allDays[index];
				bool accepted = true;

				if (dayEntryCriteria != null)
				{
					// every chained handler is invoked even after a rejection,
					// so delegate side effects are preserved
					foreach (DayEntryCollection.CriteriaHandler handler in dayEntryCriteria.GetInvocationList())
					{
						if (!handler(candidate))
						{
							accepted = false;
						}
					}
				}

				if (accepted)
				{
					result.Add(candidate);
				}
				if (result.Count >= maxDays)
				{
					break;
				}
			}

			return result;
		}


		/// <summary>
		/// Load the DayEntries that match the criteria of the includeDayEntry delegate,
		/// without a limit on the number of days.
		/// </summary>
		/// <param name="dayEntryCriteria">A delegate that returns true for each DayEntry that should be included in the DayEntryCollection returned</param>
		/// <returns>A DayEntryCollection with all matching days.</returns>
		protected DayEntryCollection InternalGetDayEntries(
			DayEntryCollection.CriteriaHandler dayEntryCriteria)
		{
			return InternalGetDayEntries(int.MaxValue, dayEntryCriteria);
		}

		/// <summary>
		/// Gets a collection of <see cref="newtelligence.DasBlog.Runtime.DayEntry"/> structures for dates starting at 
		/// the <paramref name="startDate"/> backwards for at most 
		/// <paramref name="maxDays"/>.
		/// </summary>
		/// <param name="startDate">Date at which to start collecting DayEntry structures</param>
		/// <param name="tz">Caller's time zone. NOTE(review): not referenced in
		/// this implementation — the fixed one-day lookahead below stands in
		/// for a per-timezone calculation; confirm against callers.</param>
		/// <param name="maxDays">Maximum number of days to return. This number relates to
		/// days actually found and not to calendar days.</param>
		/// <returns>A DayEntryCollection containing the collected results.</returns>
		protected DayEntryCollection InternalGetDayEntries( DateTime startDate, TimeZone tz, int maxDays)
		{
			// we look one day ahead into "UTC" future in order to grab 
			// the timezones ahead of UTC.
			return InternalGetDayEntries(maxDays, 
				DayEntryCollection.CriteriaHandlerFactory.OccursBefore(startDate.Date.AddDays(1)));

		}

		/// <summary>
		/// Gets the DayExtra structure for a given date.
		/// </summary>
		/// <param name="date">Date for which the structure shall be returned.</param>
		/// <returns>A day extra structure for the given day.</returns>
		protected DayExtra InternalGetDayExtra(DateTime date)
		{
			// DataManager.GetDayExtra returns the structure directly
			DayExtra extra = data.GetDayExtra(date);
			// extra.Load( data ); // we don't need to call this twice
			return extra;
		}
        
		/// <summary>
		/// Gets the DayExtra structure for a given date (see InternalGetDayExtra).
		/// </summary>
		/// <param name="date">Date for which the structure shall be returned.</param>
		/// <returns>A day extra structure for the given day.</returns>
		DayExtra IBlogDataService.GetDayExtra(DateTime date)
		{
			return InternalGetDayExtra( date );
		}
        
		/// <summary>
		/// Background worker that notifies the weblogs.com and blo.gs update
		/// services about a changed blog. The two notifications are attempted
		/// independently, so a failure of one does not prevent the other.
		/// All errors are logged and swallowed (best effort).
		/// </summary>
		/// <param name="argument">A <see cref="WeblogUpdatePingInfo"/> describing the blog to announce.</param>
		protected void PingWeblogsWorker( object argument )
		{
			WeblogUpdatePingInfo weblogInfo = argument as WeblogUpdatePingInfo;

			try
			{
				if ( weblogInfo.NotifyWeblogsCom )
				{
					WeblogUpdatesClientProxy updates = new WeblogUpdatesClientProxy();
					WeblogUpdatesReply reply =  updates.Ping( weblogInfo.BlogName, weblogInfo.BlogUrl );
					// flerror set means the service reported a failure
					if ( reply.flerror )
					{
						ErrorTrace.Trace(TraceLevel.Error, "Notifying weblogs.com: "+ reply.message );
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.PingWeblogsError,reply.message,weblogInfo.BlogUrl));
						}
					}
				}
			}
			catch(Exception e)
			{
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.Error,
						e.ToString().Replace("\n","<br>"),
						"PingWeblogsWorker, pinging Weblogs.com"));
				}
			}

			try
			{
				if ( weblogInfo.NotifyBloGs )
				{
					// blo.gs uses the extended ping with check and RSS URLs
					ExtendedWeblogUpdatesClientProxy updates = new ExtendedWeblogUpdatesClientProxy();
					WeblogUpdatesReply reply = updates.ExtendedPing( weblogInfo.BlogName, weblogInfo.BlogUrl, weblogInfo.CheckUrl, weblogInfo.RssUrl );
					if ( reply.flerror )
					{
						ErrorTrace.Trace(TraceLevel.Error, "Notifying blo.gs: "+ reply.message );
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.PingBloGsError,reply.message,weblogInfo.BlogUrl));
						}
					}
				}
			}
			catch(Exception e)
			{
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.Error,
						e.ToString().Replace("\n","<br>"),
						"PingWeblogsWorker, pinging blo.gs"));
				}
			}
		}

		/// <summary>
		/// Work item handed to <see cref="PingbackWorker"/>: pairs the
		/// pingback source information with the entry whose content is
		/// scanned for outbound links.
		/// </summary>
		protected class PingbackJob
		{
			internal PingbackInfo info;
			internal Entry entry;

			internal PingbackJob( PingbackInfo info, Entry entry )
			{
				this.info = info;
				this.entry = entry;
			}
		}

		/// <summary>
		/// Sends a single XML-RPC pingback to the given pingback service and
		/// logs success or failure. Errors are logged and swallowed (best effort).
		/// </summary>
		/// <param name="sourceUri">URL of the local entry that links to the target.</param>
		/// <param name="pingbackService">URL of the remote XML-RPC pingback endpoint; may be null (no-op).</param>
		/// <param name="pingbackTarget">URL of the remote page being pinged; may be null (no-op).</param>
		/// <param name="entryTitle">Title of the local entry, used for the event log.</param>
		protected void Pingback( string sourceUri, string pingbackService, string pingbackTarget, string entryTitle )
		{
			try
			{
				if ( pingbackService != null &&  pingbackTarget != null )
				{
					PingbackClientProxy client = new PingbackClientProxy();
					client.UserAgent = "newtelligence dasBlog/1.7";
					client.Url = pingbackService;
					client.ping( sourceUri, pingbackTarget);

					this.loggingService.AddEvent(
						new EventDataItem(
						EventCodes.PingbackSent,
						entryTitle,
						sourceUri,
						pingbackTarget));
				}
			}
			catch(XmlRpcFaultException xmlFault)
			{
				// the remote server answered with an XML-RPC fault: log code and text
				ErrorTrace.Trace(TraceLevel.Error,xmlFault);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.PingbackServerError,
						String.Format("{0}: {1}", xmlFault.FaultCode, xmlFault.FaultString),
						sourceUri + "," + pingbackTarget));
				}
			}
			catch(Exception e)
			{
				// transport or protocol failure
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.PingbackServerError,
						e.ToString().Replace("\n","<br>"),
						sourceUri + "," + pingbackTarget));
				}
			}
		}

		/// <summary>
		/// Background worker that scans an entry's HTML content for outbound
		/// http links and notifies each linked page: first via trackback
		/// (RDF autodiscovery), then via pingback (X-Pingback response header
		/// or &lt;link rel="pingback"&gt; autodiscovery,
		/// http://www.hixie.ch/specs/pingback/pingback). Failures per link are
		/// logged and swallowed so one bad target does not stop the scan.
		/// </summary>
		/// <param name="argument">A <see cref="PingbackJob"/> carrying the pingback info and the entry.</param>
		protected void PingbackWorker( object argument )
		{
			PingbackJob job = argument as PingbackJob;
			
			if ( job.entry.Content != null &&
				job.entry.Content.Length > 0 )
			{
				// match href="..." (quoted) or a bare whitespace-delimited href value
				Regex anchors = new Regex("href\\s*=\\s*(?:(?:\\\"(?<url>[^\\\"]*)\\\")|(?<url>[^\\s]* ))");

				foreach(Match match in anchors.Matches(job.entry.Content)) 
				{ 
					string url = match.Groups["url"].Value;
					
					if (url.StartsWith("http")) // don't pass in a url without http into Uri constructor
					{
						try
						{
							Uri externalUri = new Uri( url );

							if ( externalUri.Scheme == Uri.UriSchemeHttp )
							{
								HttpWebRequest webRequest = WebRequest.Create(externalUri) as HttpWebRequest;
								webRequest.Method="GET";
								webRequest.UserAgent = "newtelligence dasBlog/1.7";

								string requestBody;
								string pingbackService;

								// FIX: dispose the response even when reading the
								// body throws (it previously leaked on exception)
								using (HttpWebResponse response = webRequest.GetResponse() as HttpWebResponse)
								{
									using (StreamReader requestReader = new StreamReader(response.GetResponseStream()))
									{
										requestBody = requestReader.ReadToEnd();
									}

									// first we try and get the X-Pingback HTTP header
									// http://www.hixie.ch/specs/pingback/pingback
									pingbackService = response.GetResponseHeader("X-Pingback");
								}

								// we will try a trackback first before a pingback
								// we need to auto discover the trackback url
								// http://www.movabletype.org/docs/mttrackback.html
								string trackbackUrl = GetTrackbackLink(requestBody);
								if (trackbackUrl != null)
								{
									TrackbackInfo info = new TrackbackInfo(trackbackUrl, job.info.SourceUrl, job.info.SourceTitle, job.info.SourceExcerpt, job.info.SourceBlogName);
									TrackbackWorker(new TrackbackJob(info, job.entry));
								}

								// if we don't get the header, try to autodetect the
								// pingback endpoint from a <link rel="pingback"> element.
								// FIX: was (pingbackService == null && pingbackService.Length == 0),
								// which either threw or was false, so autodiscovery never ran.
								if ( pingbackService == null || pingbackService.Length == 0 )
								{
									Regex regex = new Regex("<link rel=\"pingback\" href=\"([^\"]+)\" ?/?>", RegexOptions.IgnoreCase);
									// FIX: was Regex.Split with "split.Length == 1" taking split[0],
									// which is the whole page body when there is NO match; use
									// Match and the captured href group instead.
									Match serviceMatch = regex.Match(requestBody);
									if (serviceMatch.Success)
									{
										pingbackService = serviceMatch.Groups[1].Value;
									}
								}
								
								if ( pingbackService != null && pingbackService.Length > 0 )
								{
									Pingback(job.info.SourceUrl,pingbackService,url, job.entry.Title);
								}
							}
						}
						catch(Exception e)
						{
							ErrorTrace.Trace(TraceLevel.Error,e);
							if ( loggingService != null )
							{
								loggingService.AddEvent(
									new EventDataItem(EventCodes.Error,
									e.ToString().Replace("\n","<br>"),
									"PingbackWorker, auto-discovery of: " + url));
							}
						}
					}
				}
			}
		}

		/// <summary>
		/// Work item handed to <see cref="TrackbackWorker"/>: pairs the
		/// trackback target/source information with the entry being announced.
		/// </summary>
		private class TrackbackJob
		{
			internal TrackbackInfo info;
			internal Entry entry;
            
			internal TrackbackJob( TrackbackInfo info, Entry entry )
			{
				this.info = info;
				this.entry = entry;
			}
		}

		/// <summary>
		/// Auto-discovers a trackback ping URL from RDF metadata embedded in
		/// a page body (http://www.movabletype.org/docs/mttrackback.html).
		/// </summary>
		/// <param name="pageBody">HTML of the remote page.</param>
		/// <returns>The first http trackback:ping URL found, or null when none exists.</returns>
		protected string GetTrackbackLink(string pageBody)
		{
			Regex rdfBlocks = new Regex(@"<rdf:\w+\s[^>]*?>(</rdf:rdf>)?", RegexOptions.IgnoreCase);
			// the ping target is carried in a trackback:ping attribute
			Regex pingAttribute = new Regex("trackback:ping=\"(?<url>[^\"]+)\"", RegexOptions.IgnoreCase);

			foreach (Match rdfMatch in rdfBlocks.Matches(pageBody))
			{
				Match pingMatch = pingAttribute.Match(rdfMatch.Value);
				if (pingMatch.Groups["url"].Value != "")
				{
					Uri pingUri = new Uri(pingMatch.Groups["url"].Value);
					if (pingUri.Scheme == Uri.UriSchemeHttp)
					{
						return pingUri.ToString();
					}
				}
			}

			return null;
		}

		/// <summary>
		/// Sends a single trackback ping — a form-encoded HTTP POST per
		/// http://www.movabletype.org/docs/mttrackback.html — to the target
		/// URL in the job and logs success or failure. Errors are logged and
		/// swallowed (best effort).
		/// </summary>
		/// <param name="argument">A <see cref="TrackbackJob"/> with the trackback info and the entry.</param>
		protected void TrackbackWorker( object argument )
		{
			TrackbackJob job = argument as TrackbackJob;
			try
			{
				
				string trackbackUrl = job.info.TargetUrl;

				if ( trackbackUrl != null &&
					trackbackUrl.Length > 0 )
				{
					// build the url/title/excerpt/blog_name form body;
					// title and excerpt are truncated to 80 characters
					string trackbackMsg = "url=" + HttpUtility.UrlEncode(job.info.SourceUrl); 
					if ( job.info.SourceTitle != null && job.info.SourceTitle.Length > 0 )
					{
						trackbackMsg += 
							"&title=" + HttpUtility.UrlEncode(job.info.SourceTitle.Length>80?job.info.SourceTitle.Substring(0,80):job.info.SourceTitle);
					}
					
					if ( job.info.SourceExcerpt != null && job.info.SourceExcerpt.Length > 0 )
					{
						trackbackMsg += "&excerpt=" + HttpUtility.UrlEncode(job.info.SourceExcerpt.Length>80?job.info.SourceExcerpt.Substring(0,80):job.info.SourceExcerpt);
					}

					trackbackMsg += "&blog_name=" + HttpUtility.UrlEncode(job.info.SourceBlogName);

					WebRequest request = WebRequest.Create( new Uri( trackbackUrl ) );
					
					request.Method = "POST";
					request.ContentType = "application/x-www-form-urlencoded";
					
					using (StreamWriter requestWriter = new StreamWriter(request.GetRequestStream()))
					{
						requestWriter.Write(trackbackMsg);
					}
					
					// the response content is not needed; dispose it right away
					using(request.GetResponse())
					{
					}

					this.loggingService.AddEvent(
						new EventDataItem(
						EventCodes.TrackbackSent,
						job.entry.Title,
						job.info.SourceUrl,
						job.info.TargetUrl));
				}
			}
			catch(Exception e)
			{
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(
						EventCodes.TrackbackServerError,
						e.ToString().Replace("\n","<br>"),
						job.info.SourceUrl,
						job.info.TargetUrl,
						job.entry.Title));
				}
			}
		}

		/// <summary>
		/// Work item handed to <see cref="CrosspostWorker"/>: the cross-post
		/// configuration (a CrosspostInfo or CrosspostInfoCollection), the
		/// entry to push, and the data service used to save the entry after
		/// the remote post ids have been recorded.
		/// </summary>
		private class CrosspostJob
		{
			internal object info;
			internal Entry entry;
			internal IBlogDataService dataService;
            
			internal CrosspostJob( object info, Entry entry, IBlogDataService dataService )
			{
				this.info = info;
				this.entry = entry;
				this.dataService = dataService;
			}
		}


		/// <summary>
		/// Pushes one entry to a single remote blog site via the "blogger" or
		/// "metaweblog" XML-RPC API. When <c>ci.IsAlreadyPosted</c> is set the
		/// remote post is edited in place; otherwise a new remote post is
		/// created and its id is recorded in <c>entry.Crossposts</c>.
		/// Every failure path is logged and swallowed (best effort).
		/// </summary>
		/// <param name="ci">Target site configuration and cross-post state.</param>
		/// <param name="entry">The entry to cross-post; its Crossposts collection is updated in memory.</param>
		protected void HandleCrosspost( CrosspostInfo ci, Entry entry )
		{
			try
			{
				BloggerAPIClientProxy proxy = new BloggerAPIClientProxy();
				UriBuilder uriBuilder = new UriBuilder("http",ci.Site.HostName,ci.Site.Port,ci.Site.Endpoint);
				proxy.Url = uriBuilder.ToString();
				proxy.UserAgent="newtelligence dasBlog/1.4";

				if ( ci.IsAlreadyPosted )
				{
					// edit the existing remote post
					try
					{
						if ( ci.Site.ApiType == "metaweblog" )
						{
							mwPost existingPost = new mwPost();
							existingPost.link ="";
							existingPost.permalink="";
							existingPost.categories = ci.Categories.Split(';');
							existingPost.postid = ci.TargetEntryId;
							existingPost.dateCreated = entry.CreatedUtc;
							existingPost.title = entry.Title;
							// the tracking snippet lets the remote copy link back here
							existingPost.description = entry.Content + ci.GetTrackingSnippet(entry.EntryId);

							proxy.metaweblog_editPost(ci.TargetEntryId,ci.Site.Username,ci.Site.Password,existingPost,true);

							Crosspost cp = new Crosspost();
							cp.TargetEntryId = ci.TargetEntryId;
							cp.ProfileName = ci.Site.ProfileName;
							cp.Categories = ci.Categories;
							entry.Crossposts.Add( cp );

						}
						else if ( ci.Site.ApiType == "blogger" )
						{
							proxy.blogger_editPost("",ci.TargetEntryId,ci.Site.Username,ci.Site.Password,entry.Content+ci.GetTrackingSnippet(entry.EntryId),true);

							Crosspost cp = new Crosspost();
							cp.TargetEntryId = ci.TargetEntryId;
							cp.ProfileName = ci.Site.ProfileName;
							entry.Crossposts.Add( cp );        
						}
	
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.CrosspostChanged, ci.Site.HostName, null)); 
						}
                        
					}
					catch( XmlRpcFaultException xrfe )
					{
						ErrorTrace.Trace(TraceLevel.Error,xrfe);
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								xrfe.Message,
								String.Format("Updating cross-post entry {0} on {1}; Failed with server-fault code, {2} \"{3}\"",ci.TargetEntryId,ci.Site.ProfileName,xrfe.FaultCode, xrfe.FaultString)));
						}
					}
					catch(Exception e)
					{
						ErrorTrace.Trace(TraceLevel.Error,e);
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								e.ToString().Replace("\n","<br>"),
								String.Format("Updating cross-post entry {0} on {1}",ci.TargetEntryId,ci.Site.ProfileName)));
						}
					}
				}
				else
				{
					// create a new remote post and remember its id
					try
					{

						if ( ci.Site.ApiType == "metaweblog" )
						{
							mwPost newPost = new mwPost();
							newPost.link ="";
							newPost.permalink="";
							newPost.postid="";
							newPost.categories = ci.Categories.Split(';');
							newPost.dateCreated = entry.CreatedUtc;
							newPost.description = entry.Content+ci.GetTrackingSnippet(entry.EntryId);
							newPost.title = entry.Title;
							newPost.postid = proxy.metaweblog_newPost(ci.Site.BlogId,
								ci.Site.Username,
								ci.Site.Password,
								newPost, true);
							Crosspost cp = new Crosspost();
							cp.TargetEntryId = newPost.postid;
							cp.ProfileName = ci.Site.ProfileName;
							cp.Categories = ci.Categories;
							entry.Crossposts.Add( cp );                    
						}
						else if ( ci.Site.ApiType == "blogger" )
						{
							Crosspost cp = new Crosspost();
							cp.TargetEntryId = proxy.blogger_newPost("",ci.Site.BlogId,ci.Site.Username,ci.Site.Password,entry.Content+ci.GetTrackingSnippet(entry.EntryId),true);
							cp.ProfileName = ci.Site.ProfileName;
							entry.Crossposts.Add( cp );
						}

						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.CrosspostAdded, ci.Site.HostName, null)); 
						}
					}
					catch( XmlRpcFaultException xrfe )
					{
						ErrorTrace.Trace(TraceLevel.Error,xrfe);
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								xrfe.Message,
								String.Format("Adding cross-post entry to {0}; Failed with server-fault code, {1} \"{2}\"",ci.Site.ProfileName,xrfe.FaultCode, xrfe.FaultString)));
						}
					}
					catch(Exception e)
					{
						ErrorTrace.Trace(TraceLevel.Error,e);
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								e.ToString().Replace("\n","<br>"),
								String.Format("Adding cross-post entry to {0}", ci.Site.ProfileName)));
						}
					}
				}
			}
			catch(Exception e)
			{
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.Error,
						e.ToString().Replace("\n","<br>"),
						String.Format("HandleCrosspost to {0}",ci.Site.ProfileName)));
				}
			}
            
		}

		/// <summary>
		/// Background worker that pushes an entry to one or more configured
		/// cross-post targets and then saves the entry so the recorded remote
		/// post ids are persisted. Errors are logged and swallowed.
		/// </summary>
		/// <param name="argument">A <see cref="CrosspostJob"/> whose info is either a CrosspostInfo or a CrosspostInfoCollection.</param>
		protected void CrosspostWorker( object argument )
		{
			CrosspostJob job = argument as CrosspostJob;
			try
			{
				CrosspostInfoCollection infoCollection = job.info as CrosspostInfoCollection;
				if ( infoCollection != null )
				{
					foreach( CrosspostInfo crosspostInfo in infoCollection )
					{
						HandleCrosspost( crosspostInfo, job.entry );
					}
				}
				else
				{
					CrosspostInfo singleInfo = job.info as CrosspostInfo;
					if ( singleInfo != null )
					{
						HandleCrosspost( singleInfo, job.entry );
					}
				}

				// persist the remote post ids recorded by HandleCrosspost
				job.dataService.SaveEntry(job.entry);
			}
			catch(Exception e)
			{
				ErrorTrace.Trace(TraceLevel.Error,e);
				if ( loggingService != null )
				{
					loggingService.AddEvent(
						new EventDataItem(EventCodes.Error,
						e.ToString().Replace("\n","<br>"),
						"CrosspostWorker"));
				}
			}
		}

		/// <summary>
		/// Looks up an entry by its id, or — when the id lookup fails — by its
		/// compressed title (with "+" removed, compared case-insensitively).
		/// Non-public entries are only returned to callers in the "admin" role.
		/// </summary>
		/// <param name="entryId">Entry id or compressed title.</param>
		/// <returns>The matching entry, or null when not found or not visible to the caller.</returns>
		protected Entry InternalGetEntry( string entryId )
		{
			Entry entryResult = null;
			DayEntry day;

			DateTime foundDate = GetDateForEntry( entryId );
			if ( foundDate == DateTime.MinValue)
			{
				// the cache knows no entry (or title) with this id
				entryResult = null;
			}
			else
			{
				day = InternalGetDayEntry( foundDate );

				if (day.Entries.ContainsKey(entryId))
				{
					entryResult = day.Entries[entryId];
				}

				// entryId not found, so find by title
				if (entryResult == null)
				{
					foreach (Entry entry in day.Entries)
					{
						// permalinks drop the "+" from compressed titles
						string compressedTitle = entry.CompressedTitle.Replace("+", "");

						if (CaseInsensitiveComparer.Default.Compare(compressedTitle,entryId) == 0)
						{
							entryResult = entry;
							break;
						}
					}
				}
				
				/* OmarS: this code is no longer necessary because DasBlogUpgrader fixes entries
				if (entryResult == null)
				{
					// Some times the entries will be in the wrong file
					// due to dateTime issues that were caused by an older
					// version of dasBlog
					// TODO: need a way to regenerate all the /content files on next upgrade.
					DayEntry earlierDay = InternalGetDayEntry( foundDate.AddDays(-1));
					foreach (Entry entry in earlierDay.Entries)
					{
						if (earlierDay.Entries.ContainsKey(entry.EntryId))
						{
							return earlierDay.Entries[entry.EntryId];
						}
					}
				}
				*/
			}

			// Don't return entries where IsPublic is false
			// unless the user is in the "admin" role.
			if( (entryResult != null) 
				&& (!entryResult.IsPublic)
				&& !Thread.CurrentPrincipal.IsInRole("admin"))
			{
				entryResult = null;
			}

			return entryResult;
		}

		/// <summary>
		/// Returns the Entry for a given entryId. 
		/// </summary>
		/// <param name="entryId">The entry id or, as a fallback, the compressed title of the entry.</param>
		/// <returns>The matching entry, or null when it does not exist or is
		/// not public and the caller is not in the "admin" role.</returns>
		Entry IBlogDataService.GetEntry( string entryId )
		{
			return InternalGetEntry(entryId);
		}

//		/// <summary>
//		/// Returns the Entry for a given entryId. 
//		/// </summary>
//		/// <param name="entryId"></param>
//		/// <returns></returns>
//		string IBlogDataService.GetEntryTitle( string entryId )
//		{
//			EntryIdCache ecache = GetEntryIdCache();
//			string title = ecache.GetTitleFromEntryId(entryId);
//			return title;
//		}

		/// <summary>
		/// Returns a detached copy of the entry identified by <paramref name="entryId"/>.
		/// Changes made to the copy are not visible to the runtime until the entry is
		/// saved again via SaveEntry.
		/// </summary>
		/// <param name="entryId">Unique id of the entry to copy.</param>
		/// <returns>A clone of the entry.</returns>
		Entry IBlogDataService.GetEntryForEdit( string entryId )
		{
			// NOTE(review): InternalGetEntry can return null (unknown id, or a
			// non-public entry for a non-admin caller), in which case this throws a
			// NullReferenceException -- confirm callers expect that behavior.
			Entry original = InternalGetEntry( entryId );
			return original.Clone();
		}

		/// <summary>
		/// Collects entries from the days matched by <paramref name="dayEntryCriteria"/>,
		/// keeping only the entries matched by <paramref name="entryCriteria"/>, until
		/// either <paramref name="maxDays"/> days have been examined or
		/// <paramref name="maxEntries"/> entries have been collected.
		/// </summary>
		/// <param name="dayEntryCriteria">A delegate that decides which days are included.</param>
		/// <param name="entryCriteria">A delegate that decides which entries are included; may be null.</param>
		/// <param name="maxDays">The maximum number of days to examine.</param>
		/// <param name="maxEntries">The maximum number of entries to return.</param>
		/// <returns>The collected entries, in day order.</returns>
		public EntryCollection /*IBlogDataService*/ GetEntries(
			DayEntryCollection.CriteriaHandler dayEntryCriteria, 
			EntryCollection.CriteriaHandler entryCriteria, 
			int maxDays, int maxEntries)
		{
			EntryCollection result = new EntryCollection();
			DayEntryCollection matchingDays = InternalGetDayEntries( maxDays, dayEntryCriteria );

			foreach (DayEntry day in matchingDays)
			{
				// Day contents are loaded lazily; make sure the entries are available.
				day.Load(data);

				foreach (Entry candidate in day.GetEntries(entryCriteria))
				{
					if (result.Count >= maxEntries)
					{
						break;
					}
					result.Add( candidate );
				}

				// Stop scanning further days once the cap has been reached.
				if (result.Count >= maxEntries)
				{
					break;
				}
			}
			return result;
		}

		/// <summary>
		/// Gets a collection of at most <paramref name="maxEntries"/> entries for dates
		/// starting at <paramref name="startDateUtc"/> and going backwards for at most
		/// <paramref name="maxDays"/> days. The result can optionally be filtered by a
		/// category name and by accepted languages.
		/// </summary>
		/// <param name="startDateUtc">UTC-normalized date at which to start collecting entries. See remarks.</param>
		/// <param name="tz">Display time zone. NOTE(review): currently unused by this
		/// implementation (the per-entry time criteria is commented out upstream) -- confirm.</param>
		/// <param name="acceptLanguages">Optional language filter. May be empty or null.</param>
		/// <param name="maxDays">Maximum number of days to return; counts days actually found, not calendar days.</param>
		/// <param name="maxEntries">Maximum number of entries to return.</param>
		/// <param name="categoryName">Optional category filter. May be empty or null.</param>
		/// <returns>The matching entries.</returns>
		/// <remarks>
		///     The start date is expressed as a date relative to the UTC timezone and is
		///     normalized to UTC 0000 hrs.
		/// </remarks>
		// TODO:  Consider refactoring to use InternalGetDayEntries that takes delegates.
		EntryCollection IBlogDataService.GetEntriesForDay(
			DateTime startDateUtc, TimeZone tz, string acceptLanguages, int maxDays, int maxEntries, string categoryName)
		{
			EntryCollection.CriteriaHandler entryCriteria = null;

			// Optionally narrow the result set to a single category.
			bool hasCategory = ( categoryName != null && categoryName.Length > 0 );
			if ( hasCategory )
			{
				entryCriteria += EntryCollection.CriteriaHandlerFactory.IsInCategory(categoryName);
			}

			// Optionally narrow the result set to the requested languages.
			bool hasLanguages = ( acceptLanguages != null && acceptLanguages.Length > 0 );
			if ( hasLanguages )
			{
				entryCriteria += EntryCollection.CriteriaHandlerFactory.IsInAcceptedLanguagesOrMultiLingual(acceptLanguages);
			}

			// Only days up to (and including) the start date qualify.
			DateTime dayCutoff = startDateUtc.Date.AddDays(1);
			return GetEntries(
				DayEntryCollection.CriteriaHandlerFactory.OccursBefore(dayCutoff),
				entryCriteria,
				maxDays, maxEntries);
		}


		/// <summary>
		/// Gets all entries that fall within the month containing <paramref name="month"/>,
		/// as seen from the given time zone, optionally filtered by accepted languages.
		/// </summary>
		/// <param name="month">Any date within the month of interest.</param>
		/// <param name="timeZone">Time zone used to decide which month an entry falls in.</param>
		/// <param name="acceptLanguages">Optional language filter. May be empty or null.</param>
		/// <returns>The entries for the month.</returns>
		EntryCollection IBlogDataService.GetEntriesForMonth( 
			DateTime month, TimeZone timeZone, string acceptLanguages) 
		{
			EntryCollection entries;
			EntryCollection.CriteriaHandler entryCriteria = null;

			// FIX: use the framework helper instead of the old
			// AddMonths(1).AddSeconds(-1).Day round-trip -- same value, clearer intent.
			int daysInMonth = DateTime.DaysInMonth(month.Year, month.Month);

			// the entry is only eligible if its timezone time falls within the month
			entryCriteria += EntryCollection.CriteriaHandlerFactory.OccursInMonth(
				timeZone, month);

			if( acceptLanguages != null && acceptLanguages.Length > 0)
			{
				entryCriteria += EntryCollection.CriteriaHandlerFactory.IsInAcceptedLanguagesOrMultiLingual(acceptLanguages);
			}
					
			// TODO:  In theory it should be unnecessary to specify maxDays because there
			// cannot be more than one DayEntry per day, but it existed in previous code.
			// Verify and then remove.
			entries = GetEntries(DayEntryCollection.CriteriaHandlerFactory.OccursInMonth(timeZone, month),
				entryCriteria, daysInMonth, int.MaxValue);

			return entries;
		}

		// TODO:  Consider refactoring to use InternalGetDayEntries that takes delegates.  It is slightly more
		// complicated because this method uses CategoryCache().
		/// <summary>
		/// Gets all entries belonging to the given category, optionally filtered by
		/// accepted languages, sorted with the standard entry sorter.
		/// </summary>
		/// <param name="categoryName">Name of the category to look up in the category cache.</param>
		/// <param name="acceptLanguages">Optional language filter. May be empty or null.</param>
		/// <returns>The matching entries; empty when the category is unknown.</returns>
		EntryCollection IBlogDataService.GetEntriesForCategory( string categoryName, string acceptLanguages )
		{
			CategoryCache cache = new CategoryCache();
			cache.Ensure(data);

			EntryCollection entryList = new EntryCollection();

			// FIX: the language criteria does not depend on the day being examined,
			// so build it once up front instead of rebuilding it on every loop pass.
			EntryCollection.CriteriaHandler entryCriteria = null;
			if(acceptLanguages != null && acceptLanguages.Length > 0)
			{
				entryCriteria += EntryCollection.CriteriaHandlerFactory.IsInAcceptedLanguagesOrMultiLingual(acceptLanguages);
			}

			CategoryCacheEntry catEntry = cache.Entries[categoryName];
			if(catEntry != null)
			{
				foreach (CategoryCacheEntryDetail detail in catEntry.EntryDetails)
				{
					DayEntry day = data.Days[detail.DayDateUtc];
					if(day != null)
					{
						// Day contents are loaded lazily; make sure the entries are available.
						day.Load(data);
						Entry entry = day.GetEntries(entryCriteria)[detail.EntryId];
						if(entry != null)
						{
							entryList.Add(entry);
						}
					}
				}
			}
			entryList.Sort(new EntrySorter());
			return entryList;
		}

		/// <summary>
		/// Returns the distinct set of dates (translated into the given time zone, time
		/// component normalized to midnight) on which at least one entry exists, in the
		/// order the entries appear in the id cache.
		/// </summary>
		/// <param name="tz">Time zone used to translate the stored UTC dates.</param>
		/// <returns>An array of distinct local dates.</returns>
		DateTime[] IBlogDataService.GetDaysWithEntries(TimeZone tz)
		{
			ArrayList uniqueDays = new ArrayList();

			foreach( EntryIdCacheEntry cacheEntry in GetEntryIdCache().Entries )
			{
				DateTime localDay = tz.ToLocalTime( cacheEntry.DateUtc ).Date;
				if ( uniqueDays.Contains( localDay ) )
				{
					continue;
				}
				uniqueDays.Add( localDay );
			}
			return (DateTime[])uniqueDays.ToArray( typeof(DateTime) );
		}

		/// <summary>
		/// Deletes the entry with the given id from its day file. When crosspost sites
		/// are supplied, first issues blogger_deletePost calls to remove any remote
		/// copies whose profile name matches a configured site; remote failures are
		/// logged and do not stop the local delete. The day file is saved in all cases
		/// once the entry's date is resolved.
		/// </summary>
		/// <param name="entryId">Id of the entry to delete. Unknown ids are a no-op.</param>
		/// <param name="crosspostSites">Sites to clean up cross-posts on; may be null to skip remote cleanup.</param>
		void IBlogDataService.DeleteEntry( string entryId, CrosspostSiteCollection crosspostSites )
		{
			DateTime foundDate = GetDateForEntry( entryId );
			if ( foundDate == DateTime.MinValue)
				return;            

			DayEntry day = InternalGetDayEntry( foundDate );
			Entry currentEntry = day.Entries[entryId];

			if (currentEntry != null)
			{
				if (crosspostSites != null)
				{
					// Match each of the entry's crossposts against the configured sites
					// by profile name and delete the remote copy over XML-RPC.
					foreach( Crosspost cp in currentEntry.Crossposts )
					{
						foreach( CrosspostSite site in crosspostSites )
						{
							if ( site.ProfileName == cp.ProfileName )
							{
								try
								{
									BloggerAPIClientProxy proxy = new BloggerAPIClientProxy();
									UriBuilder uriBuilder = new UriBuilder("http",site.HostName,site.Port,site.Endpoint);
									proxy.Url = uriBuilder.ToString();
									proxy.UserAgent="newtelligence dasBlog/1.4";

									proxy.blogger_deletePost("",cp.TargetEntryId,site.Username,site.Password,true);

									if ( loggingService != null )
									{
										loggingService.AddEvent(
											new EventDataItem(EventCodes.CrosspostDeleted, currentEntry.Title, site.ProfileName)); 
									}
								}
								catch( XmlRpcFaultException xrfe )
								{
									// Server reported an XML-RPC fault: trace + log, keep going.
									ErrorTrace.Trace(TraceLevel.Error,xrfe);
									if ( loggingService != null )
									{
										loggingService.AddEvent(
											new EventDataItem(EventCodes.Error,
											xrfe.Message,
											String.Format("Deleting cross-post entry {0} on {1}; Failed with server-fault code, {2} \"{3}\"",cp.TargetEntryId,cp.ProfileName,xrfe.FaultCode, xrfe.FaultString)));
									}
								}
								catch(Exception e)
								{
									// Any other failure (network, serialization, ...): trace + log, keep going.
									ErrorTrace.Trace(TraceLevel.Error,e);
									if ( loggingService != null )
									{
										loggingService.AddEvent(
											new EventDataItem(EventCodes.Error,
											e.ToString().Replace("\n","<br>"),
											String.Format("Deleting cross-post entry {0} from {1}",cp.TargetEntryId,cp.ProfileName)));
									}
								}
								// Only one site per profile name: stop scanning sites for this crosspost.
								break;
							}
                     
						}
					}
				}

				day.Entries.Remove( currentEntry );
			}

			// Persist the day file even when the entry was already gone.
			day.Save( data );
		}

		/// <summary>
		/// Creates a new entry or updates an existing one, then schedules any requested
		/// notification work (weblog pings, pingbacks, trackbacks, crossposts) on the
		/// thread pool.
		/// </summary>
		/// <param name="entry">The entry to save. Must have a non-empty EntryId and must
		/// be an editable copy (see GetEntryForEdit), never the runtime's own cached instance.</param>
		/// <param name="trackingInfos">Optional notifications: WeblogUpdatePingInfo,
		/// PingbackInfo(Collection), TrackbackInfo(Collection), CrosspostInfo(Collection).
		/// Null items are ignored.</param>
		/// <returns>Updated when an existing entry was changed, Added when a new entry
		/// was stored, Failed when the entry has no id.</returns>
		/// <exception cref="ArgumentNullException">When <paramref name="entry"/> is null.</exception>
		/// <exception cref="ArgumentException">When the runtime's own entry instance is passed in.</exception>
		EntrySaveState IBlogDataService.SaveEntry( Entry entry, params object[] trackingInfos )
		{
			// FIX: this null check used to sit after the first dereferences of 'entry'
			// (EntryId, CreatedUtc), so a null argument surfaced as a
			// NullReferenceException before the check was reached. Validate first,
			// and pass the parameter name (not the message) to ArgumentNullException.
			if (entry == null)
			{
				throw new ArgumentNullException("entry");
			}

			bool found=false;
            
			if ( entry.EntryId == null || entry.EntryId.Length == 0)
				return EntrySaveState.Failed;

			DayEntry day = InternalGetDayEntry( entry.CreatedUtc.Date );
			Entry currentEntry = day.Entries[entry.EntryId];

			// OmarS: now that all the entries are returned from a cache, and not deserialized
			// for each request, users who call GetEntry() will get the current entry in the runtime.
			// That entry can be modified freely, and changes will not be committed till day.Save()
			// is called. However, since they have made changes, and passed in that entry, currentEntry
			// and entry are the same objects (reference the same object) and now the changes are in the day
			// but they haven't been saved. This can cause weird problems, and the runtime may have data
			// that is not committed, and it will be lost if day.Save() is never called.
			if (entry.Equals(currentEntry))
			{
				// FIX: the message used to name a non-existent method ("GetEditableEntry");
				// the copy method is GetEntryForEdit.
				throw new ArgumentException("You have modified an existing entry and are passing that in. You need to call GetEntryForEdit to get a copy of the entry before modifying it");
			}

			// we need to check to see if the two objects are equal so that we avoid erasing
			// data like Crossposts.Clear() which will remove the crosspostInfo from both entries
			if(currentEntry != null && !currentEntry.Equals(entry))
			{
				// we will only change the mod date if there has been a change to a few things
				// NOTE(review): CompareTo(entry) == 1 presumably means "content differs" --
				// confirm against Entry.CompareTo before relying on it.
				if (currentEntry.CompareTo(entry) == 1)
				{
					// DateTime.UtcNow is equivalent to DateTime.Now.ToUniversalTime(),
					// without the local-time round trip.
					currentEntry.ModifiedUtc = DateTime.UtcNow;
				}

				currentEntry.Categories = entry.Categories;
				currentEntry.Content = entry.Content;
				currentEntry.CreatedUtc = entry.CreatedUtc;
				currentEntry.Description = entry.Description;
				currentEntry.anyAttributes = entry.anyAttributes;
				currentEntry.anyElements = entry.anyElements;
				
				currentEntry.Author = entry.Author;
				currentEntry.IsPublic = entry.IsPublic;
				currentEntry.Language = entry.Language;
				currentEntry.AllowComments = entry.AllowComments;
				currentEntry.Link = entry.Link;
				currentEntry.ShowOnFrontPage = entry.ShowOnFrontPage;
				currentEntry.Title = entry.Title;
				
				// Replace the collection contents rather than the collection references,
				// so other holders of currentEntry keep seeing the same objects.
				currentEntry.Crossposts.Clear();
				currentEntry.Crossposts.AddRange(entry.Crossposts);
				currentEntry.Attachments.Clear();
				currentEntry.Attachments.AddRange(entry.Attachments);

				day.Save(data);
				data.lastEntryUpdate = currentEntry.ModifiedUtc;
				data.IncrementEntryChange();
				found = true;
			}
			else		
			{
				// No entry with this id on that day yet: store it as a new entry.
				day.Entries.Add(entry);
				day.Save(data);
				data.lastEntryUpdate = entry.CreatedUtc;
				data.IncrementEntryChange();
				found = false;
			}

			if (trackingInfos != null)
			{
				// Each notification runs on the thread pool so saving returns quickly.
				foreach( object trackingInfo in trackingInfos )
				{
					if (trackingInfo != null)
					{
						if ( trackingInfo is WeblogUpdatePingInfo )
						{
							ThreadPool.QueueUserWorkItem(
								new WaitCallback(this.PingWeblogsWorker), 
								(WeblogUpdatePingInfo)trackingInfo);
						}
						else if ( trackingInfo is PingbackInfo )
						{
							PingbackJob pingbackJob	 = new PingbackJob((PingbackInfo)trackingInfo, entry);
                    
							ThreadPool.QueueUserWorkItem(
								new WaitCallback(this.PingbackWorker), 
								pingbackJob);
						}
						else if ( trackingInfo is PingbackInfoCollection )
						{
							PingbackInfoCollection pic = trackingInfo as PingbackInfoCollection;
							foreach( PingbackInfo pi in pic )
							{
								PingbackJob pingbackJob	 = new PingbackJob(pi, entry);
                   
								ThreadPool.QueueUserWorkItem(
									new WaitCallback(this.PingbackWorker), 
									pingbackJob);
							}
						}
						else if ( trackingInfo is TrackbackInfo )
						{
							ThreadPool.QueueUserWorkItem(
								new WaitCallback(this.TrackbackWorker), 
								new TrackbackJob((TrackbackInfo)trackingInfo,entry));
						}
						else if ( trackingInfo is TrackbackInfoCollection )
						{
							TrackbackInfoCollection tic = trackingInfo as TrackbackInfoCollection;
							foreach( TrackbackInfo ti in tic )
							{
								ThreadPool.QueueUserWorkItem(
									new WaitCallback(this.TrackbackWorker), 
									new TrackbackJob(ti,entry));
							}
						}
						else if ( trackingInfo is CrosspostInfo ||
							trackingInfo is CrosspostInfoCollection )
						{
							// CrosspostWorker accepts both the single info and the collection.
							ThreadPool.QueueUserWorkItem(
								new WaitCallback(this.CrosspostWorker), 
								new CrosspostJob(trackingInfo,entry,this));
						}
					}
				}
			}
			return found?EntrySaveState.Updated:EntrySaveState.Added;
		}
        
		/// <summary>
		/// Returns the cached category list. Callers in the "admin" role see every
		/// category; everyone else sees only categories marked public.
		/// </summary>
		/// <returns>The categories visible to the current principal.</returns>
		CategoryCacheEntryCollection IBlogDataService.GetCategories()
		{
			CategoryCache cache = new CategoryCache();
			cache.Ensure(data);

			// Admins get the cached collection as-is.
			if(Thread.CurrentPrincipal.IsInRole("admin"))
			{
				return cache.Entries;
			}

			// Everyone else gets a filtered copy containing only public categories.
			CategoryCacheEntryCollection publicOnly = new CategoryCacheEntryCollection();
			foreach(CategoryCacheEntry category in cache.Entries)
			{
				if(category.IsPublic)
				{
					publicOnly.Add(category);
				}
			}
			return publicOnly;
		}
        
		/// <summary>
		/// Records a tracking against its target entry's DayExtra file, unless an
		/// identical tracking (same permalink and target entry id) is already stored.
		/// Lookup failures are logged with a stack trace and swallowed.
		/// </summary>
		/// <param name="tracking">The tracking to store; TargetEntryId identifies the entry.</param>
		private void InternalAddTracking( Tracking tracking )
		{
			Entry entry = InternalGetEntry( tracking.TargetEntryId );
			if ( entry == null )
			{
				// Target entry vanished or never existed: log and bail out.
				StackTrace trace = new StackTrace();
				string message = String.Format("InternalAddTracking: Entry not found: {0}, {1}, {2} {3}",
					tracking.TrackingType, tracking.TargetTitle, tracking.TargetEntryId, trace.ToString());
				this.loggingService.AddEvent(
					new EventDataItem(EventCodes.Error, message, ""));
				return;
			}

			DayExtra extra = InternalGetDayExtra( entry.CreatedUtc );
			if ( extra == null )
			{
				// No DayExtra for the entry's day: log and bail out.
				StackTrace trace = new StackTrace();
				string message = String.Format("InternalAddTracking: DayExtra not found: {0}, {1}, {2}, {3} {4}",
					tracking.TrackingType, tracking.TargetTitle, tracking.TargetEntryId, entry.CreatedUtc, trace.ToString());
				this.loggingService.AddEvent(
					new EventDataItem(EventCodes.Error, message, ""));
				return;
			}

			// Skip duplicates: same permalink pointing at the same entry (id compared
			// case-insensitively).
			bool alreadyStored = false;
			foreach( Tracking existing in extra.Trackings )
			{
				if ( existing.PermaLink == tracking.PermaLink && 
					existing.TargetEntryId.ToUpper() == tracking.TargetEntryId.ToUpper() )
				{
					alreadyStored = true;
					break;
				}
			}
			if ( alreadyStored )
			{
				return;
			}

			tracking.TargetTitle = entry.Title;
			extra.Trackings.Add(tracking);
			extra.Save(data);
			data.IncrementExtraChange();
		}

		/// <summary>
		/// Worker loop that drains the tracking queue. Blocks on trackingQueueEvent
		/// until AddTracking signals new work, then dequeues and stores trackings until
		/// the queue is empty. Never returns; intended to run on a dedicated thread.
		/// </summary>
		private void TrackingHandler( )
		{
			while ( true )
			{
				Tracking tracking;

				// block the thread from entering the next loop till trackingQueueEvent.Set() is called
				trackingQueueEvent.WaitOne();
				// NOTE(review): Count is read outside the lock; this is safe only while
				// this method is the sole consumer of the queue -- confirm.
				while ( trackingQueue.Count != 0 )
				{
					try
					{
						lock( trackingQueue.SyncRoot )
						{
							// 'as' yields null for any non-Tracking item, which is skipped below.
							tracking = trackingQueue.Dequeue() as Tracking;
						}
						if ( tracking != null )
						{
							InternalAddTracking( tracking );
						}

						if ( trackingQueue.Count == 0 )
						{
							break;
						}
					}
					catch (InvalidOperationException ex)
					{
						// Dequeue on an empty queue (race with the Count check above).
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								ex.ToString().Replace("\n","<br>"),
								"Dequeue from TrackingHandler"));
						}
					}
					catch(Exception ex)
					{
						// Keep the worker alive no matter what InternalAddTracking throws.
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								ex.ToString().Replace("\n","<br>"),
								"Unhandled Exception from TrackingHandler"));
						}
					}
				}
			}
		}

		/// <summary>
		/// Sends a single mail message through the configured SMTP server. Failures
		/// are traced and logged, never propagated to the caller.
		/// </summary>
		/// <param name="info">Carries the SMTP server name and the message to send.</param>
		private void InternalSendMail( SendMailInfo info )
		{
			try
			{
				SmtpMail.SmtpServer = info.SmtpServer;
				SmtpMail.Send( info.Message );
			}
			catch(Exception mailError)
			{
				ErrorTrace.Trace(TraceLevel.Error,mailError);
				if ( loggingService == null )
				{
					return;
				}
				loggingService.AddEvent(
					new EventDataItem(EventCodes.Error,
					mailError.ToString().Replace("\n","<br>"),
					"InternalSendMail"));
			}
		}

		/// <summary>
		/// Worker loop that drains the mail queue. Blocks on sendMailInfoQueueEvent
		/// until a producer signals new work, then dequeues and sends messages until
		/// the queue is empty. Never returns; intended to run on a dedicated thread.
		/// </summary>
		private void SendMailHandler(  )
		{
			while ( true )
			{
				SendMailInfo sendMailInfo;

				// block the thread from entering the next loop till trackingQueueEvent.Set() is called
				sendMailInfoQueueEvent.WaitOne();
				// NOTE(review): Count is read outside the lock; this is safe only while
				// this method is the sole consumer of the queue -- confirm.
				while ( sendMailInfoQueue.Count != 0 )
				{
					try
					{
						lock( sendMailInfoQueue.SyncRoot )
						{
							// 'as' yields null for any non-SendMailInfo item, which is skipped below.
							sendMailInfo = sendMailInfoQueue.Dequeue() as SendMailInfo;
						}
						if ( sendMailInfo != null )
						{
							InternalSendMail( sendMailInfo );
						}

						if ( sendMailInfoQueue.Count == 0 )
						{
							break;
						}
					}
					catch (Exception e)
					{
						// Keep the worker alive no matter what sending throws.
						ErrorTrace.Trace(TraceLevel.Error,e);
						if ( loggingService != null )
						{
							loggingService.AddEvent(
								new EventDataItem(EventCodes.Error,
								e.ToString().Replace("\n","<br>"),
								"InternalSendMail from SendMailHandler"));
						}
					}
				}
			}
		}

		/// <summary>
		/// Queues a tracking for asynchronous processing by the tracking worker
		/// thread, optionally queueing mail notifications for the mail worker first.
		/// </summary>
		/// <param name="tracking">The tracking to record.</param>
		/// <param name="actions">Optional follow-up actions; SendMailInfo items are queued, everything else is ignored.</param>
		void IBlogDataService.AddTracking( Tracking tracking, params object[] actions )
		{
			if ( actions != null )
			{
				foreach( object action in actions )
				{
					SendMailInfo mailInfo = action as SendMailInfo;
					if ( mailInfo == null )
					{
						continue;
					}
					lock( sendMailInfoQueue.SyncRoot )
					{
						sendMailInfoQueue.Enqueue( mailInfo );
					}
					// Wake the mail worker thread.
					sendMailInfoQueueEvent.Set();
				}
			}

			lock( trackingQueue.SyncRoot )
			{
				trackingQueue.Enqueue(tracking);
			}
			// Wake the tracking worker thread.
			trackingQueueEvent.Set();
		}

		/// <summary>
		/// Returns every tracking stored for the given entry id (matched
		/// case-insensitively against each tracking's TargetEntryId).
		/// </summary>
		/// <param name="entryId">Id of the entry whose trackings are requested.</param>
		/// <returns>The trackings; empty when the entry is unknown.</returns>
		TrackingCollection IBlogDataService.GetTrackingsFor( string entryId )
		{
			TrackingCollection result = new TrackingCollection();

			DateTime entryDate = GetDateForEntry( entryId );
			if ( entryDate == DateTime.MinValue )
			{
				// Unknown entry: nothing to return.
				return result;
			}

			string upperId = entryId.ToUpper();
			DayExtra extra = data.GetDayExtra( entryDate );
			foreach( Tracking candidate in extra.Trackings )
			{
				if ( candidate.TargetEntryId.ToUpper() == upperId )
				{
					result.Add( candidate );
				}
			}
			return result;
		}

		/// <summary>
		/// Stores a comment against its target entry's DayExtra file and optionally
		/// queues mail notifications. Comments for unknown entries are silently dropped.
		/// </summary>
		/// <param name="comment">The comment to store; TargetEntryId identifies the entry.</param>
		/// <param name="actions">Optional follow-up actions; SendMailInfo items are queued for the mail worker.</param>
		void IBlogDataService.AddComment(Comment comment, params object [] actions )
		{
			DateTime date = GetDateForEntry( comment.TargetEntryId );
			if ( date == DateTime.MinValue )
				return;

			if ( actions != null )
			{
				foreach( object action in actions )
				{
					if ( action is SendMailInfo )
					{
						// FIX: enqueue under the queue's SyncRoot, consistent with
						// AddTracking -- the mail worker thread dequeues under this
						// lock concurrently, and the previous unsynchronized Enqueue
						// could corrupt the queue.
						lock( sendMailInfoQueue.SyncRoot )
						{
							sendMailInfoQueue.Enqueue( action );
						}
						sendMailInfoQueueEvent.Set();
					}
				}
			}
			
			data.lastCommentUpdate = comment.CreatedUtc;
			DayExtra extra = data.GetDayExtra( date );
			extra.Comments.Add(comment);
			extra.Save(data);
			data.IncrementExtraChange();
		}

		/// <summary>
		/// Deletes the comment whose id matches <paramref name="commentid"/> from the
		/// entry's DayExtra file. No-op when the entry is unknown; the DayExtra file is
		/// saved once the entry's date is resolved, whether or not a comment was removed.
		/// </summary>
		/// <param name="entryid">Id of the entry the comment belongs to.</param>
		/// <param name="commentid">Id of the comment to delete (matched against Comment.EntryId).</param>
		void IBlogDataService.DeleteComment( string entryid, string commentid )
		{
			DateTime date = GetDateForEntry( entryid );
			if ( date == DateTime.MinValue )
				return;

			DayExtra extra = data.GetDayExtra( date );

			// Locate the comment first, then remove it outside the enumeration.
			Comment doomed = null;
			foreach (Comment candidate in extra.Comments)
			{
				if (candidate.EntryId == commentid)
				{
					doomed = candidate;
					break;
				}
			}
			if (doomed != null)
			{
				extra.Comments.Remove(doomed);
			}

			extra.Save(data);
			data.IncrementExtraChange();
		}

		/// <summary>
		/// Returns every comment stored for the given entry id.
		/// </summary>
		/// <param name="entryId">Id of the entry whose comments are requested.</param>
		/// <returns>The comments; empty when the entry is unknown.</returns>
		CommentCollection IBlogDataService.GetCommentsFor( string entryId )
		{
			// Thin interface wrapper around the shared lookup helper.
			CommentCollection comments = InternalGetCommentsFor(entryId);
			return comments;
		}

		/// <summary>
		/// Collects all comments from the entry's DayExtra file whose target entry id
		/// matches <paramref name="entryId"/> (case-insensitive).
		/// </summary>
		/// <param name="entryId">Id of the entry whose comments are requested.</param>
		/// <returns>The matching comments; empty when the entry is unknown.</returns>
		CommentCollection InternalGetCommentsFor (string entryId)
		{
			CommentCollection result = new CommentCollection();

			DateTime entryDate = GetDateForEntry( entryId );
			if ( entryDate == DateTime.MinValue )
			{
				// Unknown entry: nothing to collect.
				return result;
			}

			string upperId = entryId.ToUpper();
			foreach( Comment candidate in data.GetDayExtra( entryDate ).Comments )
			{
				if ( candidate.TargetEntryId.ToUpper() == upperId )
				{
					result.Add( candidate );
				}
			}
			return result;
		}

		/// <summary>
		/// Returns all comments in the blog. Tries to deserialize the cached
		/// AllComments.xml first; when that fails, rebuilds the collection by walking
		/// every day's entries, then rewrites the cache file either way.
		/// </summary>
		/// <returns>All comments (possibly empty, never null).</returns>
		CommentCollection IBlogDataService.GetAllComments()
		{
			CommentCollection _com = null;
			try 
			{
				// Fast path: load the cached collection from disk.
				XmlSerializer _ser = new XmlSerializer(typeof(CommentCollection));
				using (StreamReader reader = new StreamReader(Path.Combine(contentBaseDirectory,"AllComments.xml"))) 
				{
					_com = (CommentCollection)_ser.Deserialize(reader);
				}
			}
			catch(Exception e)
			{
				// Missing or corrupt cache file: fall through to the rebuild below.
				ErrorTrace.Trace(TraceLevel.Error,e);
			}
			if (_com == null) 
			{
				// recreate the AllComments.xml content by scanning every entry.
				// NOTE(review): day.Load(data) is not called here, unlike the other
				// scanners in this class -- confirm day.Entries is populated.
				_com = new CommentCollection();

				foreach(DayEntry day in this.data.Days)
				{
					foreach(Entry entry in day.Entries)
					{
						CommentCollection comments = this.InternalGetCommentsFor(entry.EntryId);
						foreach (Comment comment in comments)
						{
							if( _com.IndexOf(comment) < 0)
							{
								_com.Add(comment);
							}
						}
					}					
				}
			}
			
			// Rewrite the cache file so the next call can take the fast path.
			XmlSerializer commentSerializer = new XmlSerializer(typeof(CommentCollection),Data.NamespaceURI);
			string fileName = data.ResolvePath("AllComments.xml");
			FileStream fileStream = FileUtils.OpenForReadWrite(fileName);
			
			if (fileStream != null)
			{
				// FIX: the stream used to be closed manually after the writer's using
				// block; if the StreamWriter constructor threw, the FileStream leaked.
				// Dispose both deterministically instead.
				using (fileStream)
				using (StreamWriter writer = new StreamWriter(fileStream))
				{
					commentSerializer.Serialize(writer, _com);
				}
			}
		
			return _com;
		}

		/// <summary>
		/// The DateTime of the last modified or created post.
		/// </summary>
		/// <returns>DateTime of the last entry modification, in UTC.</returns>
		DateTime IBlogDataService.GetLastEntryUpdate()
		{
			// Simply surfaces the timestamp maintained by SaveEntry.
			DateTime lastUpdate = data.lastEntryUpdate;
			return lastUpdate;
		}

		/// <summary>
		/// The DateTime of the most recent comment entry.
		/// </summary>
		/// <returns>DateTime of the last comment entry, in UTC.</returns>
		DateTime IBlogDataService.GetLastCommentUpdate()
		{
			if (data.lastCommentUpdate != DateTime.MinValue)
			{
				return data.lastCommentUpdate;
			}

			// Not tracked in-memory yet: fall back to the AllComments.xml file
			// timestamp and cache it for subsequent calls.
			FileInfo commentsFile = new FileInfo(Path.Combine(contentBaseDirectory,"AllComments.xml"));
			data.lastCommentUpdate = commentsFile.LastWriteTimeUtc;
			return data.lastCommentUpdate;
		}
	}   
}
