﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using Celeriq.Utilities;
using System.IO;
using Celeriq.Common;
using System.Diagnostics;
using Celeriq.Utilities;
using Celeriq.QueryLog;
using System.Xml;

namespace Celeriq.RepositoryAPI
{
	internal class Repository
	{
		#region Class Members

		private const int THREAD_THRESHOLD = 100000; //Number of records needed to use threading
		private const int THREAD_CORE_COUNT = 3; //Number of processors needed to use threading
		private const bool CACHE_ON = true; //Master switch for the query results cache

		private RepositoryDefinition _repositoryDefinition = null; //Schema for this repository (fields, dimensions, PK)
		private QueryLogger _queryLogger; //Writes one entry per executed query

		private readonly object _globalLock = new object(); //Guards all mutable state below
		private List<DataItemExtension> _list = null; //Master in-memory record list
		private List<DimensionItem> _dimensionList = null; //Runtime dimension objects (one per defined dimension)
		private List<FileCacheHelper<RefinementItem>> _dDimensionCache = null; //On-disk backing store, one file per dimension
		private int _pkindex = -1; //the index in the data array of the primary key

		private Dictionary<long, List<DataItemExtension>> _dimensionMappedItemCache = null; //Refinement DVIdx -> items carrying that value

		private string _cacheFolder = string.Empty; //Per-repository data folder (RepositoryFolder\<ID>)
		private string _cacheFile = string.Empty; //Full path of repository.data
		private long _maxRecordIndex = 1; //Next record index to assign on insert
		private int _coreCount = 0; //Processor count, captured at load time

		private HashSet<int> _pkList = new HashSet<int>(); //Primary keys present (fast membership test before scans)
		private FileCacheHelper<long> _deletedCache = null; //On-disk log of deleted record indexes
		private HashSet<long> _deletedList = new HashSet<long>(); //In-memory set of deleted record indexes
		private SequencedHashTable<int, DataQueryResults> _resultsCache = null; //Query hash -> cached results
		private FileCacheHelper<DataItemExtension> _fileCache = null; //On-disk backing store for the master list
		private CacheControl _cacheControl = null; //Decides which queries are worth caching
		private Celeriq.Utilities.ServiceStartup _startup = null; //Startup configuration supplied by the host
		private long _versionHash = 0; //Schema version stamp returned with every result
		private DateTime _lastAccess = DateTime.MinValue; //Last read/write touch; MinValue when unloaded
		private bool _isLoaded = false; //True when repository data is resident in memory
		private List<SystemCredentials> _userList = null; //Users loaded from the user file at construction
		private KeyPair _masterKeys = null; //Master key pair decrypted from disk at construction

		//Diagnostic data
		private DataItem _lastInserted = null; //Most recently inserted item (debugging aid)

		#endregion

		#region Constructors

		/// <summary>
		/// Initializes the repository for the given startup configuration: decrypts the
		/// master key pair, loads the user list, and computes the per-repository cache
		/// folder (RepositoryFolder\&lt;repository ID&gt;). Data itself is loaded lazily
		/// by ReloadMe on first use.
		/// </summary>
		/// <param name="startup">Host-supplied configuration including the repository definition</param>
		public Repository(Celeriq.Utilities.ServiceStartup startup)
		{
			//NOTE(review): both calls below perform disk I/O in the constructor and may
			//throw — confirm callers expect construction to fail on a bad key/user file
			_masterKeys = SecurityHelper.DecryptObjectFromDisk<KeyPair>(ConfigurationSettings.GetKeyFile());
			_userList = UserDomain.LoadUserFile();
			_startup = startup;
			_cacheFolder = Path.Combine(startup.RepositoryFolder, startup.RepositoryDefinition.ID.ToString());
		}

		private bool _isReloading = false; //Re-entrancy guard for ReloadMe
		/// <summary>
		/// Loads (or reloads) all repository data from disk: per-dimension cache files,
		/// the deleted-record log, the dimension/item mapping buckets and the master
		/// data file. Re-entrant calls return immediately; callers invoke this under
		/// _globalLock, so the _isReloading flag itself is not independently synchronized.
		/// </summary>
		/// <param name="startup">Startup configuration supplying the repository definition</param>
		private void ReloadMe(Celeriq.Utilities.ServiceStartup startup)
		{
			if (_isReloading) return;
			_isReloading = true;

			try
			{
				var timer = new Stopwatch();
				timer.Start();

				//Create the data folder
				if (!Directory.Exists(_cacheFolder))
				{
					Directory.CreateDirectory(_cacheFolder);
					WriteCacheFile();
				}

				_queryLogger = new QueryLogger(_cacheFolder);
				_repositoryDefinition = startup.RepositoryDefinition;
				_versionHash = _repositoryDefinition.VersionHash;
				//Locate the primary key's position within the field list
				_pkindex = _repositoryDefinition.FieldList.IndexOf(_repositoryDefinition.PrimaryKey);

				Logger.Setup(Path.Combine(_cacheFolder, "repository.log"), typeof (Repository));
				Logger.LogInfo("Repository Data Loading Started: " + startup.RepositoryDefinition.ID.ToString());

				//Create the master list
				_list = new List<DataItemExtension>();
				_coreCount = Environment.ProcessorCount;

				_cacheControl = new CacheControl();

				//Load the dimension files (one data file per dimension: d1000000.data, d1000001.data, ...)
				_dDimensionCache = new List<FileCacheHelper<RefinementItem>>();
				var index = 0;
				foreach (var d in this._repositoryDefinition.DimensionList)
				{
					var f = new FileCacheHelper<RefinementItem>(Path.Combine(_cacheFolder, "d1" + index.ToString("000000") + ".data"));
					_dDimensionCache.Add(f);
					index++;
				}

				_deletedCache = new FileCacheHelper<long>(Path.Combine(_cacheFolder, "deleted.data"));
				LoadDeletedList();

				//Create the dimension lists
				_dimensionList = new List<DimensionItem>();
				index = 0;
				foreach (var d in this._repositoryDefinition.DimensionList)
				{
					_dimensionList.Add(new DimensionItem() {Name = d.Name, DIdx = DimensionDefinition.DGROUP + index, NumericBreak = d.NumericBreak});
					index++;
				}

				//Initialize the dimensions from file
				index = 0;
				foreach (var d in this._repositoryDefinition.DimensionList)
				{
					InitializeDimension(_dimensionList[index], _dDimensionCache[index]);
					index++;
				}

				//The data files
				_cacheFile = Path.Combine(_cacheFolder, "repository.data");

				_fileCache = new FileCacheHelper<DataItemExtension>(_cacheFile);

				//Load dimension/item mapping cache: one empty bucket per refinement value,
				//keyed by DVIdx; populated as data is loaded/inserted
				_dimensionMappedItemCache = new Dictionary<long, List<DataItemExtension>>();
				foreach (var d in _dimensionList)
				{
					foreach (var r in d.RefinementList)
					{
						_dimensionMappedItemCache.Add(r.DVIdx, new List<DataItemExtension>());
					}
				}

				//NOTE(review): _isLoaded is not set in this method; presumably LoadData
				//marks the repository as loaded — confirm
				LoadData();

				timer.Stop();
				Logger.LogInfo("Repository Data Loaded: " + startup.RepositoryDefinition.ID.ToString() + " (" + timer.ElapsedMilliseconds.ToString("###,###,###,##0") + " ms | " + _list.Count.ToString("###,###,###,##0") + " items | " + (_list.Count/(timer.ElapsedMilliseconds/1000.0)).ToString("###,###,###,##0") + " items/s)");
			}
			catch (Exception ex)
			{
				Logger.LogError("Repository Loading Error\n" + ex.ToString());
				throw;
			}
			finally
			{
				//Always clear the guard, even when loading failed
				_isReloading = false;
			}
		}

		#endregion

		#region Properties

		//Last time this repository was read or written; DateTime.MinValue when unloaded
		public DateTime LastAccess
		{
			get { return _lastAccess; }
		}

		//True while the repository's data is resident in memory
		public bool IsLoaded
		{
			get { return _isLoaded; }
		}

		#endregion

		#region Unload

		/// <summary>
		/// Flushes pending changes to disk, then releases every in-memory structure and
		/// marks the repository as unloaded. Calling it on an already-unloaded repository
		/// is a no-op. Failures are logged and rethrown.
		/// </summary>
		public void UnloadData()
		{
			try
			{
				lock (_globalLock)
				{
					if (!_isLoaded) return;

					//Persist everything before dropping in-memory state
					SaveData();

					//Release the master list and dimension structures
					_list = null;
					_dimensionList = null;
					if (_dDimensionCache != null)
					{
						foreach (var dimensionCache in _dDimensionCache)
						{
							(dimensionCache as IDisposable).Dispose();
						}
					}
					_dDimensionCache = null;
					_pkindex = -1;
					_dimensionMappedItemCache = null;
					_maxRecordIndex = 1;
					_coreCount = 0;
					_pkList = new HashSet<int>();
					if (_deletedCache != null)
						(_deletedCache as IDisposable).Dispose();
					_deletedCache = null;
					_deletedList = new HashSet<long>();
					_resultsCache = null;
					if (_fileCache != null)
						(_fileCache as IDisposable).Dispose();
					_fileCache = null;
					_cacheControl = null;
					_versionHash = 0;

					//Mark this repository as unloaded
					_lastAccess = DateTime.MinValue;
					_isLoaded = false;

					Logger.LogInfo("Repository Data Unloaded: " + _startup.RepositoryDefinition.ID.ToString());
				}
			}
			catch (Exception ex)
			{
				Logger.LogError(ex.ToString());
				throw;
			}
		}

		#endregion

		#region UpdateIndex

		/// <summary>
		/// Inserts or replaces (upserts) the given items. Each item is processed under the
		/// global lock: any existing item with the same primary key is removed first, the
		/// new item gets the next record index, is mapped into the dimension caches, and
		/// is persisted to the data file.
		/// </summary>
		/// <param name="list">Items to upsert; null or empty is a no-op</param>
		/// <param name="credentials">Credentials validated before any work is done</param>
		public void UpdateIndexList(IEnumerable<DataItem> list, UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			lock (_globalLock)
			{
				if (!this.IsLoaded)
				{
					ReloadMe(_startup);
				}
			}

			try
			{
				//Any() avoids fully enumerating the sequence just to count it
				if (list == null || !list.Any()) return;

				//Add the actual items to the master list
				foreach (var item in list)
				{
					lock (_globalLock) //lock for each operation
					{
						_lastAccess = DateTime.Now;
						var newItem = new DataItemExtension(item, _repositoryDefinition);
						if (!IsItemValid(newItem))
							throw new Exception("The item is not valid!");

						//Check to see if this item exists and if so remove it (upsert semantics)
						this.DeleteItem((int)newItem.ItemArray[_pkindex]);

						//Assign the next record index and add to the master list
						//(fixed: removed the no-op self-assignment of __RecordIndex)
						newItem.__RecordIndex = _maxRecordIndex;
						_maxRecordIndex++;
						_list.Add(newItem);

						//Reserve one slot per dimension, then compute the dimension mappings
						_dimensionList.ForEach(x => newItem.DimensionSingularValueArray.Add(null));
						ProcessDimensions(newItem);

						//Persist the item
						_fileCache.WriteItem(newItem);
						_lastInserted = newItem;

						_pkList.Add((int)newItem.ItemArray[_pkindex]);

						//Any cached query results are now stale
						_resultsCache.Clear();

						WriteCacheFile();
					}
				}
			}
			catch (Exception ex)
			{
				Logger.LogError(ex.ToString());
				throw;
			}
		}

		/// <summary>
		/// Reads the persisted item count back from repository.cache.
		/// Returns 0 when the file is missing or cannot be parsed.
		/// </summary>
		/// <returns>The itemcount attribute from the cache file, or 0</returns>
		private long GetItemCountFromCache()
		{
			try
			{
				lock (_globalLock)
				{
					var cacheFile = Path.Combine(_cacheFolder, "repository.cache");
					//Missing file simply means no count has been recorded yet
					if (!File.Exists(cacheFile))
						return 0;

					var document = new XmlDocument();
					document.Load(cacheFile);
					return XmlHelper.GetAttribute(document.DocumentElement, "itemcount", 0);
				}
			}
			catch (Exception ex)
			{
				Logger.LogError("GetItemCountFromCache Error\n" + ex.ToString());
				return 0;
			}
		}

		/// <summary>
		/// Persists the current item count to repository.cache. Best-effort: failures
		/// are logged and swallowed so callers are never interrupted by a cache write.
		/// </summary>
		private void WriteCacheFile()
		{
			try
			{
				lock (_globalLock)
				{
					var cacheFile = Path.Combine(_cacheFolder, "repository.cache");
					//Count is an integer so no XML escaping is required
					var content = "<repository itemcount=\"" + _pkList.Count + "\"></repository>";
					File.WriteAllText(cacheFile, content);
				}
			}
			catch (Exception ex)
			{
				Logger.LogError("WriteCacheFile Error\n" + ex.ToString());
			}
		}

		#endregion

		#region DeleteData

		/// <summary>
		/// Deletes the given items (matched by primary key) from the repository and
		/// refreshes the persisted item count.
		/// </summary>
		/// <param name="list">Items to delete; null is a no-op</param>
		/// <param name="credentials">Credentials validated before any work is done</param>
		public void DeleteData(IEnumerable<DataItem> list, UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			lock (_globalLock)
			{
				if (!this.IsLoaded)
				{
					ReloadMe(_startup);
				}

				//Null guard for consistency with UpdateIndexList (previously threw NRE)
				if (list == null) return;

				list.ToList().ForEach(x => DeleteItem((int) x.ItemArray[_pkindex]));
				WriteCacheFile();
			}
		}

		/// <summary>
		/// Removes the item with the given primary key from the in-memory list, records
		/// the deletion in the deleted-record log, and detaches the item from all
		/// dimension mappings. No-op when the key is not present.
		/// </summary>
		/// <param name="primaryKey">Primary key of the item to remove</param>
		private void DeleteItem(int primaryKey)
		{
			try
			{
				lock (_globalLock)
				{
					_lastAccess = DateTime.Now;
					//Check if this item is in the list (fast HashSet membership test)
					if (!_pkList.Contains(primaryKey))
						return;

					//Fixed: the key was previously never removed from _pkList, so the
					//persisted itemcount (WriteCacheFile uses _pkList.Count) over-counted
					//after deletes and stale keys forced useless full scans here
					_pkList.Remove(primaryKey);

					//If this item is in the list then actually find it and remove it
					var existingItem = _list.AsParallel().FirstOrDefault(x => (int) x.ItemArray[_pkindex] == primaryKey);
					if (existingItem != null)
					{
						_list.Remove(existingItem);
						//Record the deletion so the data file can skip this record on reload
						_deletedList.Add(existingItem.__RecordIndex);
						_deletedCache.WriteItem(existingItem.__RecordIndex);

						// remove the item from the _dimensions
						foreach (var val in existingItem.DimensionValueArray)
						{
							_dimensionMappedItemCache[val].Remove(existingItem);
						}
					}

					//Cached query results are stale after a delete
					_resultsCache.Clear();
				}
			}
			catch (Exception ex)
			{
				Logger.LogError(ex.ToString());
				throw;
			}
		}

		#endregion

		#region Query

		/// <summary>
		/// Executes a query against the repository: applies dimension refinements, field
		/// filters, keyword search, sorting and paging, then computes refinement counts
		/// for each dimension. Recent identical queries may be served from the results
		/// cache. The entire execution runs under the global lock.
		/// </summary>
		/// <param name="query">The query to execute; credentials are validated first</param>
		/// <returns>The matching page of records plus dimension refinement data</returns>
		public DataQueryResults Query(DataQuery query)
		{
			lock (_globalLock)
			{
				if (!this.IsLoaded)
				{
					ReloadMe(_startup);
				}

				if (!IsValidCredentials(query.Credentials))
					throw new Exception("Invalid credentials");

				_lastAccess = DateTime.Now;
				var newResults = new DataQueryResults();
				newResults.DimensionList = new List<DimensionItem>();
				newResults.RecordList = new List<DataItem>();
				newResults.Query = query;
				newResults.VersionHash = _versionHash;

				//Empty repository: nothing to search
				if (_list.Count == 0)
				{
					return newResults;
				}

				//Serve from cache when the identical query ran within the cache window
				var qHash = query.GetHashCode();
				if (CACHE_ON && _resultsCache.ContainsKey(qHash))
				{
					var q = _resultsCache[qHash];
					if (DateTime.Now.Subtract(q.QueryTime).TotalSeconds < _startup.CacheLength)
					{
						_queryLogger.Log(query.ToString(), 0, q.TotalRecordCount, true);
						return q;
					}
				}

				var timer = new Stopwatch();
				timer.Start();
				var _timeList = new List<string>(); //per-stage timings (debugging aid)
				try
				{
					var myWatch2 = new Stopwatch();

					IEnumerable<DataItemExtension> queriedList = null;

					//Apply dimension indexes first — the mapped-item buckets are pre-computed
					if (query.DimensionValueList != null && query.DimensionValueList.Count() > 0)
					{
						var filterDList = new List<DimensionItem>();
						foreach (var dvidx in query.DimensionValueList)
						{
							var dItem = _dimensionList.GetDimensionByDVIdx(dvidx);
							if (dItem != null)
							{
								var dDef = _repositoryDefinition.DimensionList.First(x => x.Name == dItem.Name);
								if (_dimensionMappedItemCache.ContainsKey(dvidx))
								{
									if (queriedList == null) queriedList = _dimensionMappedItemCache[dvidx];
									else
									{
										if (dDef.DataType == RepositoryDefinition.DataTypeConstants.List)
										{
											//List dimension refinements always INTERSECT values
											queriedList = queriedList.Intersect(_dimensionMappedItemCache[dvidx]);
										}
										else //All other dimension types
										{
											//Multiple refinements in a dimension should UNION values
											//Refinements across dimensions should INTERSECT
											if (filterDList.Contains(dItem))
												queriedList = queriedList.Union(_dimensionMappedItemCache[dvidx]);
											else
												queriedList = queriedList.Intersect(_dimensionMappedItemCache[dvidx]);
										}
									}
								}

								if (!filterDList.Contains(dItem))
									filterDList.Add(dItem);
							}
						}
					}

					//No dimension refinements: start from the full list
					if (queriedList == null) queriedList = _list;

					#region Filters
					if (query.FieldFilters != null)
					{
						//Verify there is only one Geo filter
						if (query.FieldFilters.Count(x => x is GeoCodeFieldFilter) > 1)
						{
							throw new Exception("Multiple geo location filters cannot be specified!");
						}

						foreach (var filter in query.FieldFilters)
						{
							//Clone so the comparisons below never mutate the caller's filter
							var ff = ((ICloneable)filter).Clone() as IFieldFilter;
							var field = _repositoryDefinition.FieldList.First(x => x.Name == ff.Name);
							var fieldIndex = _repositoryDefinition.FieldList.IndexOf(field);
							#region GeoCode
							if (field.DataType == RepositoryDefinition.DataTypeConstants.GeoCode)
							{
								//LessThan/LessThanOrEq mean "within radius"; others compare the distance itself
								var geo = (GeoCodeFieldFilter)ff;
								switch (ff.Comparer)
								{
									case ComparisonConstants.LessThan:
									case ComparisonConstants.LessThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && GeoHelper.InRadius(((GeoCode)x.ItemArray[fieldIndex]).Latitude, ((GeoCode)x.ItemArray[fieldIndex]).Longitude, geo.Latitude, geo.Longitude, geo.Radius));
										break;
									case ComparisonConstants.GreaterThan:
									case ComparisonConstants.GreaterThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && GeoHelper.Calc(((GeoCode)x.ItemArray[fieldIndex]).Latitude, ((GeoCode)x.ItemArray[fieldIndex]).Longitude, geo.Latitude, geo.Longitude) >= geo.Radius);
										break;
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && GeoHelper.Calc(((GeoCode)x.ItemArray[fieldIndex]).Latitude, ((GeoCode)x.ItemArray[fieldIndex]).Longitude, geo.Latitude, geo.Longitude) == geo.Radius);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && GeoHelper.Calc(((GeoCode)x.ItemArray[fieldIndex]).Latitude, ((GeoCode)x.ItemArray[fieldIndex]).Longitude, geo.Latitude, geo.Longitude) != geo.Radius);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
							#region Bool
							else if (field.DataType == RepositoryDefinition.DataTypeConstants.Bool)
							{
								switch (ff.Comparer)
								{
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (bool)x.ItemArray[fieldIndex] == (bool)ff.Value);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (bool)x.ItemArray[fieldIndex] != (bool)ff.Value);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
							#region DateTime
							else if (field.DataType == RepositoryDefinition.DataTypeConstants.DateTime)
							{
								switch (ff.Comparer)
								{
									case ComparisonConstants.LessThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] < (DateTime)ff.Value);
										break;
									case ComparisonConstants.LessThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] <= (DateTime)ff.Value);
										break;
									case ComparisonConstants.GreaterThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] > (DateTime)ff.Value);
										break;
									case ComparisonConstants.GreaterThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] >= (DateTime)ff.Value);
										break;
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] == (DateTime)ff.Value);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (DateTime)x.ItemArray[fieldIndex] != (DateTime)ff.Value);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
							#region Float
							else if (field.DataType == RepositoryDefinition.DataTypeConstants.Float)
							{
								switch (ff.Comparer)
								{
									case ComparisonConstants.LessThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] < (double)ff.Value);
										break;
									case ComparisonConstants.LessThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] <= (double)ff.Value);
										break;
									case ComparisonConstants.GreaterThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] > (double)ff.Value);
										break;
									case ComparisonConstants.GreaterThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] >= (double)ff.Value);
										break;
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] == (double)ff.Value);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (double)x.ItemArray[fieldIndex] != (double)ff.Value);
										break;
									case ComparisonConstants.Between:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && ff.Value2 != null && (double)x.ItemArray[fieldIndex] >= (double)ff.Value && (double)x.ItemArray[fieldIndex] <= (double)ff.Value2);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
							#region Int
							else if (field.DataType == RepositoryDefinition.DataTypeConstants.Int)
							{
								switch (ff.Comparer)
								{
									case ComparisonConstants.LessThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] < (int)ff.Value);
										break;
									case ComparisonConstants.LessThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] <= (int)ff.Value);
										break;
									case ComparisonConstants.GreaterThan:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] > (int)ff.Value);
										break;
									case ComparisonConstants.GreaterThanOrEq:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] >= (int)ff.Value);
										break;
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] == (int)ff.Value);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && (int)x.ItemArray[fieldIndex] != (int)ff.Value);
										break;
									case ComparisonConstants.Between:
										queriedList = queriedList.Where(x => x.ItemArray[fieldIndex] != null && ff.Value != null && ff.Value2 != null && (int)x.ItemArray[fieldIndex] >= (int)ff.Value && (int)x.ItemArray[fieldIndex] <= (int)ff.Value2);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
							#region String
							else if (field.DataType == RepositoryDefinition.DataTypeConstants.String)
							{
								switch (ff.Comparer)
								{
									case ComparisonConstants.LessThan:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) < 0);
										break;
									case ComparisonConstants.LessThanOrEq:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) <= 0);
										break;
									case ComparisonConstants.GreaterThan:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) > 0);
										break;
									case ComparisonConstants.GreaterThanOrEq:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) >= 0);
										break;
									case ComparisonConstants.Equals:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) == 0);
										break;
									case ComparisonConstants.NotEqual:
										queriedList = queriedList.Where(x => string.Compare((string)x.ItemArray[fieldIndex], (string)ff.Value) != 0);
										break;
									default:
										throw new Exception("This operation is not supported!");
								}
							}
							#endregion
						}
					}
					#endregion

					#region Then do text search
					//Build one OR-of-Contains predicate across all searchable string fields and keywords
					var keywordSearchablelist = _repositoryDefinition.FieldList.Where(x => x.AllowTextSearch && x.DataType == RepositoryDefinition.DataTypeConstants.String).ToList();
					if (!string.IsNullOrEmpty(query.Keyword) && keywordSearchablelist.Count > 0)
					{
						var keywordList = query.Keyword.Split(new char[] { ' ', '\t', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
						System.Linq.Expressions.Expression<Func<DataItemExtension, bool>> w = x => false;
						foreach (var field in keywordSearchablelist)
						{
							var index = _repositoryDefinition.FieldList.IndexOf(field);
							foreach (var kw in keywordList)
							{
								w = w.Or(x =>
										 x.ItemArray[index] != null &&
												 ((string)x.ItemArray[index]).Contains(kw, StringComparison.OrdinalIgnoreCase)
									);
							}
						}
						queriedList = queriedList.Where(w.Compile());
					}
					#endregion

					//Determine SortBy lambda; __RecordIndex is always the final tiebreaker
					//so paging is stable across identical queries
					if (query.FieldSorts != null && query.FieldSorts.Count() > 0)
					{
						OrderedParallelQuery<DataItemExtension> tempOrder = null;
						foreach (var sf in query.FieldSorts)
						{
							var field = _repositoryDefinition.FieldList.First(x => x.Name == sf.Name);
							var index = _repositoryDefinition.FieldList.IndexOf(field);
							if (sf.SortDirection == SortDirectionConstants.Asc)
							{
								if (tempOrder == null) tempOrder = queriedList.AsParallel().OrderBy(x => x.ItemArray[index]);
								else tempOrder = tempOrder.ThenBy(x => x.ItemArray[index]);
							}
							else if (sf.SortDirection == SortDirectionConstants.Desc)
							{
								if (tempOrder == null) tempOrder = queriedList.AsParallel().OrderByDescending(x => x.ItemArray[index]);
								else tempOrder = tempOrder.ThenByDescending(x => x.ItemArray[index]);
							}
						}
						queriedList = tempOrder.ThenBy(x => x.__RecordIndex);
					}
					else
					{
						//Tack on recordindex sort for consistency
						queriedList = queriedList.AsParallel().OrderBy(x => x.__RecordIndex);
					}

					myWatch2.Reset();
					myWatch2.Start();

					//Materialize once — everything below enumerates the result multiple times
					queriedList = queriedList.AsParallel().ToList();

					myWatch2.Stop();
					_timeList.Add(string.Empty + myWatch2.ElapsedMilliseconds);
					myWatch2.Reset();
					myWatch2.Start();

					//Compute the requested page window
					var isZeroResult = false;
					var skipCount = (query.PageOffset - 1) * query.RecordsPerPage;
					var takeCount = query.RecordsPerPage;
					if (skipCount >= queriedList.Count())
					{
						isZeroResult = true;
					}
					else
					{
						if (skipCount + takeCount > queriedList.Count())
							takeCount = queriedList.Count() - skipCount;
					}

					if (query.RecordsPerPage < 1) isZeroResult = true;

					if (!isZeroResult)
					{
						queriedList.Skip(skipCount).Take(takeCount).ToList().ForEach(x => newResults.RecordList.Add(x.ToSerialized()));
					}
					newResults.TotalRecordCount = queriedList.Count();

					//Process the distance for geo-located filters
					//(fixed: FieldFilters may be null — the filter block above already treats it as optional)
					var geoFilter = query.FieldFilters == null ? null : query.FieldFilters.FirstOrDefault(x => x is GeoCodeFieldFilter) as GeoCodeFieldFilter;
					if (geoFilter != null)
					{
						var fieldDef = _repositoryDefinition.FieldList.FirstOrDefault(x => x.Name == geoFilter.Name);
						var fieldIndex = _repositoryDefinition.FieldList.IndexOf(fieldDef);
						foreach (var item in newResults.RecordList)
						{
							var geoField = item.ItemArray[fieldIndex] as GeoCode;
							//Fixed: guard against a null/non-GeoCode field value
							if (geoField != null)
								geoField.Distance = GeoHelper.Calc(geoFilter.Latitude, geoFilter.Longitude, geoField.Latitude, geoField.Longitude);
						}
					}

					myWatch2.Stop();
					_timeList.Add(string.Empty + myWatch2.ElapsedMilliseconds);
					myWatch2.Reset();
					myWatch2.Start();

					//Now build the dimension list
					if (_list.Count >= THREAD_THRESHOLD && _coreCount >= THREAD_CORE_COUNT)
					{
						//Threaded: one worker per dimension that the user has not already refined on
						var threadList = new List<System.Threading.Thread>();
						var dimensionDefList = _repositoryDefinition.DimensionList.ToList();
						for (var ii = 0; ii < dimensionDefList.Count; ii++)
						{
							if (_dimensionList[ii].RefinementList.Count > 0)
							{
								var threader = new QueryDimensionTheader(newResults, queriedList, _dimensionList, _repositoryDefinition, ii);
								var checkDimension = _dimensionList.First(y => y.Name == dimensionDefList[ii].Name);
								var checkRefinementList = checkDimension.RefinementList.Select(z => z.DVIdx).ToList();
								//Fixed: DimensionValueList may be null (see the guard at the top of this method)
								var dvList = newResults.Query.DimensionValueList;
								if (dvList == null || !dvList.Any(x => checkRefinementList.Contains(x)))
								{
									var t = new System.Threading.Thread(threader.ProcessDimension);
									threadList.Add(t);
									t.Start();
								}
							}
						}
						foreach (var t in threadList) t.Join();
					}
					else
					{
						//Non-Threaded
						for (var ii = 0; ii < _repositoryDefinition.DimensionList.Count(); ii++)
						{
							if (_dimensionList[ii].RefinementList.Count > 0)
							{
								myWatch2.Reset();
								myWatch2.Start();

								var threader = new QueryDimensionTheader(newResults, queriedList, _dimensionList, _repositoryDefinition, ii);
								threader.ProcessDimension();

								myWatch2.Stop();
								_timeList.Add(_dimensionList[ii].Name + " " + myWatch2.ElapsedMilliseconds);
							}
						}
					}

					//Get dimensions with parent; unless the caller asked for master results,
					//child dimensions whose parent is also present are removed from the output
					//NOTE(review): assumes NonParsedFieldList returns null (not throws) for
					//missing keys — confirm its collection type
					var isMasterResults = (query.NonParsedFieldList["masterresults"] == "true" || query.NonParsedFieldList["masterresults"] == "1");
					var defItemsWithParent = _repositoryDefinition.DimensionList.Where(y => !string.IsNullOrEmpty(y.Parent));
					if (!isMasterResults)
					{
						var isSet = true;
						while (isSet)
						{
							isSet = false;
							var childDimensions = newResults.DimensionList.Where(x => defItemsWithParent.Any(z => z.Name == x.Name)).ToList();
							foreach (var dItem in childDimensions)
							{
								var childDef = defItemsWithParent.First(x => x.Name == dItem.Name);
								var parent = newResults.DimensionList.FirstOrDefault(x => x.Name == childDef.Parent);
								if (parent != null)
								{
									newResults.DimensionList.Remove(dItem);
									isSet = true;
								}
							}
						}
					}

					//Now associate the parent with each dimension
					foreach (var dimension in defItemsWithParent)
					{
						var d = newResults.DimensionList.FirstOrDefault(x => x.Name == dimension.Name);
						if (d != null)
						{
							//This is the returned dimension object
							//If its parent dimension is in the returned set then associate them
							d.Parent = newResults.DimensionList.FirstOrDefault(x => x.Name == dimension.Parent);
						}
					}

					myWatch2.Stop();
					_timeList.Add(string.Empty + myWatch2.ElapsedMilliseconds);

					timer.Stop();
					newResults.ComputeTime = timer.ElapsedMilliseconds;

					//Cache this object if necessary
					if (_cacheControl.ShouldCache(query))
						this.PerformCaching(qHash, newResults);

					//Log this query
					_queryLogger.Log(query.ToString(), (int)newResults.ComputeTime, newResults.TotalRecordCount, false);
					newResults.VersionHash = _versionHash;
					return newResults;
				}
				catch (Exception ex)
				{
					//Fixed: log before rethrowing (was a bare rethrow with an unused ex);
					//`throw;` preserves the original stack trace
					Logger.LogError(ex.ToString());
					throw;
				}
			} //Lock
		}

		#endregion

		#region Reset

		/// <summary>
		/// Clears all record data (requires valid credentials): empties the in-memory
		/// list, deletes repository.data, and reloads the now-empty repository.
		/// </summary>
		/// <param name="credentials">Credentials validated before any work is done</param>
		public void Clear(UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			try
			{
				//Fixed: hold the lock for the entire operation (it previously covered only
				//the IsLoaded check, letting queries/updates race with the clear); the lock
				//is reentrant so ReloadMe's callers elsewhere are unaffected
				lock (_globalLock)
				{
					if (!this.IsLoaded)
					{
						ReloadMe(_startup);
					}

					_list.Clear();

					var f = Path.Combine(_cacheFolder, "repository.data");
					if (File.Exists(f))
						File.Delete(f);

					ReloadMe(_startup);
				}
			}
			catch (Exception ex)
			{
				Logger.LogError(ex.ToString());
				throw;
			}
		}

		/// <summary>
		/// Clears all data from the repository, removes every cached data file, and reloads
		/// </summary>
		/// <param name="startup">Startup settings used for the reload</param>
		public void Reset(Celeriq.Utilities.ServiceStartup startup)
		{
			try
			{
				//Hold the lock for the whole operation so concurrent callers cannot
				//observe the repository between the clear and the reload (the original
				//released the lock after the IsLoaded check). lock is reentrant, so the
				//nested ReloadMe call is safe.
				lock (_globalLock)
				{
					if (!this.IsLoaded)
					{
						//NOTE(review): the initial load uses the cached _startup while the
						//reload below uses the parameter — confirm this asymmetry is intended
						ReloadMe(_startup);
					}

					_list.Clear();

					//Remove every data file so the reload starts from an empty state
					var files = Directory.GetFiles(_cacheFolder, "*.data");
					foreach (var f in files) File.Delete(f);

					ReloadMe(startup);
				}
			}
			catch (Exception ex)
			{
				Logger.LogError(ex.ToString());
				throw;
			}
		}

		//internal void ShutDown()
		//{
		//	_list.Clear();
		//	_list = null;
		//	_dimensionList.Clear();
		//	_dimensionList = null;

		//	_dDimensionCache = null;

		//	_pkList.Clear();
		//	_pkList = null;
		//	_deletedCache = null;

		//	_deletedList.Clear();
		//	_deletedList = null;
		//	_resultsCache = null;
		//	_fileCache = null;
		//	_cacheControl = null;

		//	_lastInserted = null;

		//	Logger.LogDebug("System.Core ShutDown");
		//}

		/// <summary>
		/// Copies the repository's schema definition file to the specified backup file.
		/// </summary>
		/// <param name="startup">Startup settings identifying the repository location</param>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <param name="backupFile">Destination path for the schema copy</param>
		/// <returns>true on completion (also when no definition file exists)</returns>
		public bool ExportSchema(Celeriq.Utilities.ServiceStartup startup, UserCredentials credentials, string backupFile)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			lock (_globalLock)
			{
				//The schema lives in a single "<id>.celeriq" file in the repository folder
				var definitionPath = Path.Combine(startup.RepositoryFolder, startup.RepositoryDefinition.ID.ToString() + ".celeriq");
				var definitionFile = new FileInfo(definitionPath);
				if (definitionFile.Exists)
				{
					if (File.Exists(backupFile))
					{
						//Remove any previous backup and give the OS a moment to release the handle
						File.Delete(backupFile);
						System.Threading.Thread.Sleep(400);
					}
					definitionFile.CopyTo(backupFile);
				}
			}

			return true;
		}

		/// <summary>
		/// Copies the entire repository (data files plus definition) to an archive file
		/// in the Backup folder next to the repository folder
		/// </summary>
		/// <param name="startup">Startup settings identifying the repository location</param>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <param name="backupFile">Archive file name; any path portion is stripped</param>
		/// <returns>true on completion</returns>
		public bool Backup(Celeriq.Utilities.ServiceStartup startup, UserCredentials credentials, string backupFile)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			lock (_globalLock)
			{
				//Ensure filename only
				backupFile = (new FileInfo(backupFile)).Name;

				//Create a temp folder to stage the archive contents
				var tempFolder = Path.GetTempPath();
				tempFolder = Path.Combine(tempFolder, Guid.NewGuid().ToString());
				Directory.CreateDirectory(tempFolder);

				//Get the repository folder
				var repositoryFolder = Path.Combine(startup.RepositoryFolder, startup.RepositoryDefinition.ID.ToString());

				//Copy all data files to temp folder
				var files = Directory.GetFiles(repositoryFolder, "*.data");
				foreach (var file in files)
				{
					var fi = new FileInfo(file);
					File.Copy(file, Path.Combine(tempFolder, fi.Name));
				}

				var backupFolder = Path.Combine(startup.RepositoryFolder, @"..\Backup");
				if (!Directory.Exists(backupFolder)) Directory.CreateDirectory(backupFolder);

				var fullBackup = Path.Combine(backupFolder, backupFile);
				var backupfi = new FileInfo(fullBackup);
				//BUG FIX: the original assigned fullBackup = ".cqbak", discarding the whole
				//path when the name had no extension; append the default extension instead
				if (string.IsNullOrEmpty(backupfi.Extension))
					fullBackup += ".cqbak";

				//Write the repository definition
				startup.RepositoryDefinition.ToDisk(Path.Combine(tempFolder, "definition.xml"));

				ArchiveDomain.CreateArchive(tempFolder, "*.*", fullBackup);

				//Remove the temp folder
				Directory.Delete(tempFolder, true);
			}

			return true;
		}

		/// <summary>
		/// Given a repository archive file, recreates this repository's contents from it,
		/// preserving the current repository's name and ID
		/// </summary>
		/// <param name="startup">Startup settings identifying the repository location</param>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <param name="backupFile">Archive file name inside the Backup folder</param>
		/// <returns>true on completion</returns>
		public bool Restore(Celeriq.Utilities.ServiceStartup startup, UserCredentials credentials, string backupFile)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			lock (_globalLock)
			{
				//Start from a completely empty repository
				this.Reset(startup);

				//Extract the archive contents into the cache folder
				var backupFolder = Path.Combine(startup.RepositoryFolder, @"..\Backup");
				var archivePath = Path.Combine(backupFolder, backupFile);
				ArchiveDomain.ExtractArchive(_cacheFolder, archivePath);

				//Preserve this repository's identity across the definition swap
				var originalName = _repositoryDefinition.Name;
				var originalId = _repositoryDefinition.ID;

				//Load the archived definition, then restore the identity fields
				var definitionFile = Path.Combine(_cacheFolder, "definition.xml");
				_repositoryDefinition.FromXML(File.ReadAllText(definitionFile));
				_repositoryDefinition.Name = originalName;
				_repositoryDefinition.ID = originalId;

				//Persist the merged definition back to the repository folder
				var savePath = Path.Combine(startup.RepositoryFolder, _repositoryDefinition.ID.ToString() + ".celeriq");
				_repositoryDefinition.ToDisk(savePath);

				//Now refresh the entire repository from the restored files
				ReloadMe(startup);
			}

			return true;
		}

		#endregion

		#region Compress

		/// <summary>
		/// Compresses the disk data for this repository after validating the caller
		/// </summary>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <remarks>The repository will be offline while this operation runs</remarks>
		public void Compress(UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");
			this.Compress();
		}

		/// <summary>
		/// Compacts the on-disk data file when deleted records exist, then invalidates
		/// the query results cache
		/// </summary>
		private void Compress()
		{
			lock (_globalLock)
			{
				if (!this.IsLoaded)
				{
					ReloadMe(_startup);
				}

				//Nothing to compact unless records have been deleted
				if (_deletedList.Count == 0) return;

				this.SaveData();

				//Cached query results may reference the removed records
				if (_resultsCache != null)
					_resultsCache.Clear();
			}
		}
		#endregion

		#region GetDataSize

		/// <summary>
		/// Returns the size on disk of the repository data (sum of all *.data file lengths)
		/// </summary>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <returns>Total byte count, or 0 when the cache folder does not exist</returns>
		public long GetDataSize(UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");

			if (!Directory.Exists(_cacheFolder)) return 0;
			//Single Sum with a selector replaces the original Select/Select/Sum chain
			return Directory.GetFiles(_cacheFolder, "*.data").Sum(f => new FileInfo(f).Length);
		}
		#endregion

		#region GetItemCount

		/// <summary>
		/// Returns the number of items in the repository after validating the caller
		/// </summary>
		/// <param name="credentials">Caller credentials; must be valid</param>
		/// <returns>The item count</returns>
		public long GetItemCount(UserCredentials credentials)
		{
			if (!IsValidCredentials(credentials))
				throw new Exception("Invalid credentials");
			return GetItemCount();
		}

		/// <summary>
		/// Returns the item count, preferring the in-memory list when loaded
		/// </summary>
		internal long GetItemCount()
		{
			lock (_globalLock)
			{
				//Fall back to the on-disk cache when the data is not memory-resident
				return this.IsLoaded ? _list.Count : GetItemCountFromCache();
			}
		}

		#endregion

		#region Private Helpers

		/// <summary>
		/// Validates the supplied credentials against the known user list
		/// </summary>
		/// <param name="credentials">User name and encrypted password to verify</param>
		/// <returns>true when the user exists and the decrypted password matches the stored pre-hash</returns>
		public bool IsValidCredentials(UserCredentials credentials)
		{
			try
			{
				var user = _userList.FirstOrDefault(x => x.UserName == credentials.UserName);
				if (user == null) return false;
				//Decrypt the transmitted password with the repository's private key
				//and compare it to the stored pre-hash
				var prehash = Celeriq.Utilities.SecurityHelper.Decrypt(_masterKeys.PrivateKey, credentials.Password);
				return (prehash == user.PreHash);
			}
			catch (Exception)
			{
				//Removed the unused "ex" variable (CS0168). Any decryption or lookup
				//failure is deliberately treated as invalid credentials, not an error.
				return false;
			}
		}

		/// <summary>
		/// Populates the in-memory deleted-record set from the deleted-items cache file
		/// </summary>
		private void LoadDeletedList()
		{
			//The original wrapped this in a try/catch that only rethrew; that added
			//nothing (a bare "throw;" preserves the behavior of no catch at all), so
			//the wrapper has been removed.
			lock (_globalLock)
			{
				var tlist = FileCacheHelper<long>.LoadAll(_deletedCache.CacheFileName);
				tlist.ForEach(x => _deletedList.Add(x));
			}
		}

		/// <summary>
		/// Loads a dimension's refinement list from its on-disk cache file
		/// </summary>
		/// <param name="dimension">The dimension whose refinements are populated</param>
		/// <param name="cacheFile">Cache file holding the persisted refinements</param>
		private void InitializeDimension(DimensionItem dimension, FileCacheHelper<RefinementItem> cacheFile)
		{
			//The original wrapped this in a try/catch that only rethrew; removed as it
			//changed no behavior and obscured the single real statement.
			lock (_globalLock)
			{
				dimension.RefinementList.AddRange(FileCacheHelper<RefinementItem>.LoadAll(cacheFile.CacheFileName));
			}
		}

		/// <summary>
		/// Loads the repository data file into memory, skipping deleted records, and
		/// primes the dimension-to-item map, the primary key set, and the results cache.
		/// </summary>
		private void LoadData()
		{
			lock (_globalLock)
			{
				var tlist = new List<DataItemExtension>();
				tlist.AddRange(FileCacheHelper<DataItemExtension>.LoadAll(_fileCache.CacheFileName));
				foreach (var item in tlist)
				{
					//Records flagged as deleted stay in the file until a compact runs
					if (!_deletedList.Contains(item.__RecordIndex))
					{
						//pre-cache the dimension values to items
						item.DimensionValueArray.ForEach(x => _dimensionMappedItemCache[x].Add(item));
						_list.Add(item);
						_pkList.Add((int)item.ItemArray[_pkindex]);
					}
				}

				//Compact the on-disk file now that deleted records have been filtered out
				this.Compress();

				if (_list.Count != 0)
					_maxRecordIndex = _list.Max(x => x.__RecordIndex);
			}
			//NOTE(review): these assignments run outside the lock — presumably safe
			//because LoadData only runs during (re)initialization; confirm no reader
			//can observe _isLoaded == true before _resultsCache is assigned
			_resultsCache = new SequencedHashTable<int, DataQueryResults>();
			_isLoaded = true;
			WriteCacheFile();
		}

		/// <summary>
		/// Rewrites the data file from the in-memory list (excluding deleted records,
		/// which are no longer in _list), swaps it into place, and clears the deleted set.
		/// </summary>
		private void SaveData()
		{
			var newCache = new FileCacheHelper<DataItem>();
			var index = 0;
			while (index < _list.Count)
			{
				//Write in chunks of up to 1000 records
				var takeCount = 1000;
				if (index + takeCount > _list.Count)
					takeCount = _list.Count - index;
				//BUG FIX: the original used _list.AsParallel().Skip(index).Take(takeCount).
				//An unordered PLINQ query gives no guarantee which elements Skip/Take
				//select, so chunks could overlap or drop records, corrupting the saved
				//file. GetRange preserves list order deterministically.
				newCache.WriteItem(_list.GetRange(index, takeCount).ToArray());
				index += takeCount;
			}

			//Atomically replace the old data file with the freshly written one
			if (File.Exists(_fileCache.CacheFileName))
				File.Delete(_fileCache.CacheFileName);

			File.Move(newCache.CacheFileName, _fileCache.CacheFileName);

			//The deleted records are now physically gone; reset the tracking state
			_deletedList.Clear();
			if (File.Exists(_deletedCache.CacheFileName))
				File.Delete(_deletedCache.CacheFileName);
		}

		/// <summary>
		/// Adds (or refreshes) a query result in the bounded results cache, evicting the
		/// oldest entry when the cache has reached its configured limit.
		/// </summary>
		/// <param name="qHash">Hash of the query, used as the cache key</param>
		/// <param name="results">The computed result set to cache</param>
		private void PerformCaching(int qHash, DataQueryResults results)
		{
			if (!_resultsCache.ContainsKey(qHash))
			{
				//Remove the first item cached if we have hit cache limit
				if (_resultsCache.Count >= _cacheControl.MaxItems)
				{
					//NOTE(review): the keys are ints, so if OrderedKeys yields a value
					//type the "k != null" test is always true and the else branch is
					//unreachable — confirm OrderedKeys' element type in SequencedHashTable
					var k = _resultsCache.OrderedKeys.FirstOrDefault();
					if (k != null) _resultsCache.Remove(k);
					else
					{
						System.Diagnostics.Debug.WriteLine("Key not found. 0x2998");
						Logger.LogDebug("PerformCaching has no OrderedKeys but " + _resultsCache.Count + " cached items");
					}
				}
				_resultsCache.Add(qHash, results);
			}
			else
			{
				//Key already cached: replace the stale entry with the fresh results
				_resultsCache[qHash] = results;
			}
		}

		/// <summary>
		/// Maps a newly added item's field values onto dimension refinements: creates
		/// missing refinements (persisting them to the dimension cache files), bumps
		/// refinement counts, and records the item's dimension-value memberships.
		/// </summary>
		/// <param name="newItem">The item whose field values are processed</param>
		private void ProcessDimensions(DataItemExtension newItem)
		{
			try
			{
				var dimensionDefList = _repositoryDefinition.DimensionList.ToList();
				for (var ii = 0; ii < dimensionDefList.Count; ii++)
				{
					var dimensionDef = dimensionDefList[ii];
					//Dimensions and fields are matched by name; the field index locates
					//this dimension's raw value in the item's value array
					var dimension = _dimensionList.First(x => x.Name == dimensionDef.Name);
					var field = _repositoryDefinition.FieldList.First(x => x.Name == dimensionDef.Name);
					var fieldIndex = _repositoryDefinition.FieldList.IndexOf(field);

					#region String
					//String dimension: one refinement per distinct string value
					if (field.DataType == RepositoryDefinition.DataTypeConstants.String)
					{
						if (!string.IsNullOrEmpty((string)newItem.ItemArray[fieldIndex]))
						{
							lock (dimension)
							{
								var refinement = dimension.RefinementList.FirstOrDefault(x => x.FieldValue == (string)newItem.ItemArray[fieldIndex]);
								if (refinement == null)
								{
									//First time this value is seen: create the refinement,
									//persist it, and register its dimension-value index
									refinement = new RefinementItem() { Count = 1, DVIdx = dimension.GetNextDVIdx(), FieldValue = (string)newItem.ItemArray[fieldIndex] };
									dimension.RefinementList.Add(refinement);
									_dDimensionCache[ii].WriteItem(refinement);
									_dimensionMappedItemCache.Add(refinement.DVIdx, new List<DataItemExtension>());
								}
								else
								{
									refinement.Count++;
								}

								//This is for the non-list dimensions for fast dimensions aggregate computation
								newItem.DimensionSingularValueArray[ii] = refinement.DVIdx;

								//Add the list of all dimension values for this item
								if (!newItem.DimensionValueArray.Contains(refinement.DVIdx))
									newItem.DimensionValueArray.Add(refinement.DVIdx);

							}
						}
					}
					#endregion

					#region List
					//List dimension: the field holds multiple values; each gets its own refinement
					else if (field.DataType == RepositoryDefinition.DataTypeConstants.List)
					{
						if (newItem.ItemArray[fieldIndex] != null)
						{
							lock (dimension)
							{
								var fieldValueList = (string[])newItem.ItemArray[fieldIndex];
								foreach (var fieldValue in fieldValueList)
								{
									var refinement = dimension.RefinementList.FirstOrDefault(x => x.FieldValue == fieldValue);
									if (refinement == null)
									{
										refinement = new RefinementItem() { Count = 1, DVIdx = dimension.GetNextDVIdx(), FieldValue = fieldValue };
										dimension.RefinementList.Add(refinement);
										_dDimensionCache[ii].WriteItem(refinement);
										_dimensionMappedItemCache.Add(refinement.DVIdx, new List<DataItemExtension>());
									}
									else
									{
										refinement.Count++;
									}

									//This is for the non-list dimensions for fast dimensions aggregate computation
									//NOTE(review): inside this loop the singular slot is overwritten per
									//value, so the LAST list value wins — confirm this is intended
									newItem.DimensionSingularValueArray[ii] = refinement.DVIdx;

									//Add the list of all dimension values for this item
									if (!newItem.DimensionValueArray.Contains(refinement.DVIdx))
										newItem.DimensionValueArray.Add(refinement.DVIdx);

								}
							}
						}
					}
					#endregion

					#region Integer
					//Integer dimension without a break: one refinement per distinct value
					else if ((field.DataType == RepositoryDefinition.DataTypeConstants.Int) && dimension.NumericBreak == null)
					{
						if (newItem.ItemArray[fieldIndex] != null)
						{
							lock (dimension)
							{
								var fieldValue = (int)newItem.ItemArray[fieldIndex];
								var refinement = dimension.RefinementList.FirstOrDefault(x => x.FieldValue == fieldValue.ToString());
								if (refinement == null)
								{
									refinement = new RefinementItem()
									{
										Count = 1,
										DVIdx =
										dimension.GetNextDVIdx(),
										FieldValue = fieldValue.ToString(),
										MinValue = fieldValue,
										MaxValue = fieldValue
									};
									dimension.RefinementList.Add(refinement);
									_dDimensionCache[ii].WriteItem(refinement);
									_dimensionMappedItemCache.Add(refinement.DVIdx, new List<DataItemExtension>());
								}
								else
								{
									refinement.Count++;
								}

								//This is for the non-list dimensions for fast dimensions aggregate computation
								newItem.DimensionSingularValueArray[ii] = refinement.DVIdx;

								//Add the list of all dimension values for this item
								if (!newItem.DimensionValueArray.Contains(refinement.DVIdx))
									newItem.DimensionValueArray.Add(refinement.DVIdx);

							}
						}
					}
					//Integer dimension with a break: values are bucketed into ranges of
					//NumericBreak width; each bucket is one refinement
					else if ((field.DataType == RepositoryDefinition.DataTypeConstants.Int) && dimension.NumericBreak != null)
					{
						if (newItem.ItemArray[fieldIndex] != null)
						{
							lock (dimension)
							{
								var fieldValue = (int)newItem.ItemArray[fieldIndex];
								//Floor the value to its bucket's lower bound
								var minLevel = ((long)fieldValue / dimension.NumericBreak.Value) * dimension.NumericBreak.Value;
								var refinement = dimension.RefinementList.FirstOrDefault(x => x.MinValue == minLevel);
								if (refinement == null)
								{
									refinement = new RefinementItem()
									{
										Count = 1,
										DVIdx = dimension.GetNextDVIdx(),
										FieldValue = minLevel.ToString("###,###,###,##0") + " - " + (minLevel + dimension.NumericBreak.Value).ToString("###,###,###,##0"),
										MinValue = minLevel,
										MaxValue = minLevel + dimension.NumericBreak.Value
									};
									dimension.RefinementList.Add(refinement);
									_dDimensionCache[ii].WriteItem(refinement);
									_dimensionMappedItemCache.Add(refinement.DVIdx, new List<DataItemExtension>());
								}
								else
								{
									refinement.Count++;
								}

								//This is for the non-list dimensions for fast dimensions aggregate computation
								newItem.DimensionSingularValueArray[ii] = refinement.DVIdx;

								//Add the list of all dimension values for this item
								if (!newItem.DimensionValueArray.Contains(refinement.DVIdx))
									newItem.DimensionValueArray.Add(refinement.DVIdx);

							}
						}
					}
					#endregion

					else
					{
						throw new Exception("Unsupported dimension data type!");
					}

				}

				//Update the dimension cache: register this item under every dimension
				//value it carries (the map entries were created above when each
				//refinement was first seen)
				foreach (var dvidx in newItem.DimensionValueArray)
				{
					_dimensionMappedItemCache[dvidx].Add(newItem);
				}

			}
			catch (Exception ex)
			{
				//NOTE(review): this catch only rethrows; it exists as a breakpoint
				//site at best and could be removed
				throw;
			}
		}

		/// <summary>
		/// Validates an item's value array: it must exist, match the field count, and
		/// every non-null value must have the CLR type its field definition requires.
		/// </summary>
		/// <param name="item">The item to validate</param>
		/// <returns>true when the item is structurally valid</returns>
		private bool IsItemValid(DataItemExtension item)
		{
			var values = item.ItemArray;
			if (values == null) return false;
			if (values.Length != _repositoryDefinition.FieldList.Count) return false;

			for (var i = 0; i < _repositoryDefinition.FieldList.Count; i++)
			{
				var value = values[i];
				//Null values are permitted for any field type
				if (value == null) continue;

				switch (_repositoryDefinition.FieldList[i].DataType)
				{
					case RepositoryDefinition.DataTypeConstants.Bool:
						if (!(value is bool)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.DateTime:
						if (!(value is DateTime)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.Float:
						if (!(value is double)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.GeoCode:
						if (!(value is GeoCode)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.Int:
						if (!(value is int)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.String:
						if (!(value is string)) return false;
						break;
					case RepositoryDefinition.DataTypeConstants.List:
						if (!(value is string[])) return false;
						break;
					default:
						throw new Exception("Unknown data type!");
				}
			}
			return true;
		}

		/// <summary>
		/// Builds the expression "param.Property == value" for use in a dynamically
		/// composed string-equality predicate.
		/// </summary>
		private System.Linq.Expressions.Expression GetEqualsExpr(System.Linq.Expressions.ParameterExpression param, string property, string value)
		{
			return System.Linq.Expressions.Expression.Equal(
				System.Linq.Expressions.Expression.Property(param, property),
				System.Linq.Expressions.Expression.Constant(value));
		}

		#endregion
	}

}