#region Copyright (c) 2003, newtelligence AG. All rights reserved.
/*
// Copyright (c) 2003, newtelligence AG. (http://www.newtelligence.com)
// Original BlogX Source Code: Copyright (c) 2003, Chris Anderson (http://simplegeek.com)
// All rights reserved.
//  
// Redistribution and use in source and binary forms, with or without modification, are permitted 
// provided that the following conditions are met: 
//  
// (1) Redistributions of source code must retain the above copyright notice, this list of 
// conditions and the following disclaimer. 
// (2) Redistributions in binary form must reproduce the above copyright notice, this list of 
// conditions and the following disclaimer in the documentation and/or other materials 
// provided with the distribution. 
// (3) Neither the name of the newtelligence AG nor the names of its contributors may be used 
// to endorse or promote products derived from this software without specific prior 
// written permission.
//      
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS 
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY 
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR 
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER 
// IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 
// OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// -------------------------------------------------------------------------
//
// Original BlogX source code (c) 2003 by Chris Anderson (http://simplegeek.com)
// 
// newtelligence is a registered trademark of newtelligence Aktiengesellschaft.
// 
// For portions of this software, some additional copyright notices may apply 
// which can either be found in the license.txt file included in the source distribution
// or following this notice. 
//
*/
#endregion


using System;
using System.IO;
using System.Collections;
using System.Xml;
using System.Web;
using System.Xml.Serialization;
using System.Threading;
using newtelligence.DasBlog.Util;

namespace newtelligence.DasBlog.Runtime
{
    [XmlRoot(Namespace=Data.NamespaceURI)]
    [XmlType(Namespace=Data.NamespaceURI)]
    public class CategoryCache
    {
        /// <summary>
        /// Orders <c>CategoryCacheEntry</c> items alphabetically by <c>Name</c>,
        /// using the current culture's default string comparison (same semantics
        /// as <c>string.CompareTo</c>).
        /// </summary>
        internal class CategorySorter : IComparer
        {
            int IComparer.Compare( object x, object y )
            {
                // Direct casts instead of "as": a wrong element type now fails
                // with an InvalidCastException at the cast site rather than a
                // NullReferenceException on the Name access below.
                CategoryCacheEntry entryX = (CategoryCacheEntry)x;
                CategoryCacheEntry entryY = (CategoryCacheEntry)y;
                return entryX.Name.CompareTo(entryY.Name);
            }
        }

        // The cache is process-wide: all state is static and every mutation is
        // guarded by _entriesLock (see Ensure).
        private static long _changeNumber;
        private static bool _booting = true;
        private static CategoryCacheEntryCollection _entries;
        private static readonly object _entriesLock = new object();

        /// <summary>
        /// Legacy on-disk cache file name. Kept for interface compatibility;
        /// the cache is no longer persisted to disk (see Load/Save).
        /// </summary>
        [XmlIgnore]
        public string FileName { get { return "categoryCache.xml"; } }

        /// <summary>Entry change count the cache was last built against.</summary>
        public long ChangeNumber { get { return _changeNumber; } set { _changeNumber = value; } }

        /// <summary>The cached category entries, sorted by category name.</summary>
        public CategoryCacheEntryCollection Entries { get { return _entries; } set { _entries = value; } }

        // Round-trip any unknown XML content so cache documents written by
        // other versions deserialize without data loss.
        [XmlAnyElement]
        public XmlElement[] anyElements;
        [XmlAnyAttribute]
        public XmlAttribute[] anyAttributes;

        /// <summary>
        /// Ensures the cache exists and is current: on first use, or whenever
        /// the data store's entry change count differs from the one the cache
        /// was built against, the cache is rebuilt. All work happens under
        /// _entriesLock so concurrent requests cannot rebuild simultaneously.
        /// </summary>
        internal void Ensure(DataManager data)
        {
            lock(_entriesLock)
            {
                Load(data);
                if ( _booting || ChangeNumber != data.CurrentEntryChangeCount)
                {
                    _booting = false;
                    Build(data);
                    Save(data);
                }
            }
        }

        /// <summary>
        /// Rebuilds the category index by walking every day and every entry in
        /// the data store, grouping entry details by category name.
        /// </summary>
        internal void Build(DataManager data)
        {
            ChangeNumber = data.CurrentEntryChangeCount;

            // Defensive: a direct Build() call before Load() would otherwise
            // hit a NullReferenceException at _entries.Clear() below.
            if (_entries == null)
            {
                _entries = CategoryCacheEntryCollection.Synchronized(new CategoryCacheEntryCollection());
            }

            Hashtable build = new Hashtable();

            foreach (DayEntry day in data.Days)
            {
                day.Load(data);

                foreach (Entry entry in day.Entries)
                {
                    foreach (string cat in entry.GetSplitCategories())
                    {
                        // Single lookup instead of Contains + indexer; cached
                        // values are never null, so null means "not seen yet".
                        CategoryCacheEntry categoryCacheEntry = (CategoryCacheEntry)build[cat];
                        if (categoryCacheEntry == null)
                        {
                            categoryCacheEntry = new CategoryCacheEntry();
                            categoryCacheEntry.Name = cat;
                            categoryCacheEntry.EntryDetails = new CategoryCacheEntryDetailCollection();
                            build[cat] = categoryCacheEntry;
                        }

                        CategoryCacheEntryDetail entryDetail = new CategoryCacheEntryDetail();
                        entryDetail.DayDateUtc = day.DateUtc;
                        entryDetail.EntryId = entry.EntryId;

                        // A category is public as soon as any one of its
                        // entries is public.
                        if (!categoryCacheEntry.IsPublic && entry.IsPublic)
                        {
                            categoryCacheEntry.IsPublic = true;
                        }

                        categoryCacheEntry.EntryDetails.Add(entryDetail);
                    }
                }
            }

            _entries.Clear();
            foreach (DictionaryEntry de in build)
            {
                _entries.Add((CategoryCacheEntry)de.Value);
            }

            _entries.Sort( new CategorySorter() );
        }

        /// <summary>
        /// Ensures the in-memory entry collection exists. Historically this
        /// method deserialized categoryCache.xml from disk; that was removed
        /// because persisting the cache only avoided a rebuild after an
        /// app-domain recycle while costing millions of disk reads/writes.
        /// The <paramref name="data"/> parameter is retained for interface
        /// stability (and would be needed again to resolve the cache path).
        /// </summary>
        private void Load(DataManager data)
        {
            if (_entries == null)
            {
                // Synchronized wrapper: other threads may enumerate the
                // collection while a rebuild is in progress.
                _entries = CategoryCacheEntryCollection.Synchronized(new CategoryCacheEntryCollection());
            }
        }

        /// <summary>
        /// Finalizes a rebuild by sorting the cache. Sorting happens here (on
        /// the infrequent Save) rather than on every Load, because sorting on
        /// one thread while another enumerates previously caused
        /// "Collection was modified; enumeration operation may not execute."
        /// Disk persistence was removed for the reasons documented on Load.
        /// </summary>
        private void Save(DataManager data)
        {
            _entries.Sort( new CategorySorter() );
        }
    }
}
