﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Collections;
using System.Text.RegularExpressions;
using System.Collections.Specialized;
using Common;
using Stemmer;

namespace Searching
{
    public class Searcher
    {
        //For scoring calculations: BM25 parameters. Bc is the field-length normalization
        //factor (the classic BM25 "b"); K1 is the term-frequency saturation constant.
        private readonly float Bc = 0.75f;
        private readonly float K1 = 2; 
        //Per-field score boosts applied by SetFieldBoosts (title counts most, then summary).
        private readonly float TITLE_FIELD_BOOST = 20.0f;
        private readonly float SUMMARY_FIELD_BOOST = 10.0f;
        private readonly float NO_BOOST = 1.0f;
        /// <summary>Display string: time the search took</summary>
        private string _DisplayTime;
        /// <summary>Display string: matches (links and number of)</summary>
        private string _Matches = "";

        //Accessor for _Matches. NOTE(review): not referenced anywhere in this class — verify external need.
        private string SearchQueryMatchHtml
        {
            get { return _Matches; }
            set { _Matches = value; }
        }

        //Accessor for _DisplayTime. NOTE(review): not referenced anywhere in this class — verify external need.
        private string DisplayTime
        {
            get { return _DisplayTime; }
            set { _DisplayTime = value; }
        }


        /// <summary>
        /// For every search term, counts how many catalog documents contain it
        /// (document frequencies, used for the IDF part of the scoring formulas).
        /// </summary>
        /// <param name="aSearchTerm">search terms to look up</param>
        /// <param name="Catalog">catalog holding the posting lists</param>
        /// <returns>array parallel to aSearchTerm with the per-term document counts</returns>
        private int[] GetNumberOfDocumentsContainingWordArray(string[] aSearchTerm, Catalog Catalog)
        {
            return aSearchTerm
                .Select(term => Catalog.GetDocumentListPerWord(term).Count)
                .ToArray();
        }

        /// <summary>
        /// Builds, for each adjacent pair of search terms, the maximal allowed distance
        /// between them. Slot i covers terms i and i+1; 0 means no distance limit was
        /// requested for that pair. A DistanceQuery is matched by its first word.
        /// </summary>
        private int[] GetMaximalDistanceBetweenTermsArray(string[] aSearchTerm, List<DistanceQuery> DistanceQueryList)
        {
            int[] aMaximalDistanceBetweenTerms = new int[aSearchTerm.Length - 1];
            for (int pair = 0; pair < aMaximalDistanceBetweenTerms.Length; pair++)
            {
                //A later DistanceQuery for the same word overrides an earlier one.
                foreach (DistanceQuery dq in DistanceQueryList)
                {
                    if (dq.Word1 == aSearchTerm[pair])
                        aMaximalDistanceBetweenTerms[pair] = dq.Distance;
                }
            }
            return aMaximalDistanceBetweenTerms;
        }

        /// <summary>
        /// Performs the search according to the query and returns the list of scored
        /// documents matching it (BM25F + term proximity). Documents scored -1
        /// (irrelevant) are dropped.
        /// </summary>
        /// <param name="Query">raw user query; null yields an empty result</param>
        /// <param name="Catalog">catalog to search; null yields an empty result</param>
        /// <returns>matching documents with their Score set (unsorted)</returns>
        private List<Document> GetMatchingScoredDocuments(string Query, Catalog Catalog)
        {
            List<Document> output = new List<Document>();
            if ((null == Query) || (null == Catalog))
                return output;

            List<DistanceQuery> DistanceQueryList;
            //Keys: document id (long). Values: string[] of the query terms appearing in that document.
            //(The original pre-allocated a Hashtable and int[] buffers that were immediately
            //overwritten; those dead allocations are removed.)
            Hashtable DocumentsToRank = Search(Query, Catalog, out DistanceQueryList);

            foreach (DictionaryEntry DocumentIDSearchTermArray in DocumentsToRank)
            {
                long docID = (long)DocumentIDSearchTermArray.Key;
                Document doc = Catalog.GetDocumentByID(docID);

                //The query terms that appear in this document.
                string[] aSearchTerm = DocumentIDSearchTermArray.Value as string[];

                //For each term, the number of catalog documents containing it (for IDF).
                int[] aNumOfDocumentsContainingWord = GetNumberOfDocumentsContainingWordArray(aSearchTerm, Catalog);

                //For each adjacent term pair, the maximal allowed distance (0 = no limit).
                int[] aMaximalDistanceBetweenTerms = GetMaximalDistanceBetweenTermsArray(aSearchTerm, DistanceQueryList);

                //Score the document; -1 marks it irrelevant (terms too far apart / absent).
                doc.Score = GetDocumentScore(aSearchTerm, aMaximalDistanceBetweenTerms, doc, Catalog, aNumOfDocumentsContainingWord);
                if (doc.Score != -1)
                    output.Add(doc);
            }
            return output;
        }

              


        /// <summary>
        /// Stamps the per-field score boosts onto the document: title and summary are
        /// boosted, body text and author are left at neutral weight.
        /// </summary>
        //NOTE(review): 'ref' is redundant if Document is a reference type; kept to preserve the signature.
        private void SetFieldBoosts(ref Document Document)
        {
            Document.Fields[(int)Document.Section.Title].Boost = TITLE_FIELD_BOOST;
            Document.Fields[(int)Document.Section.Summary].Boost = SUMMARY_FIELD_BOOST;
            Document.Fields[(int)Document.Section.Text].Boost = NO_BOOST;
            Document.Fields[(int)Document.Section.Author].Boost = NO_BOOST;

        }

        /// <summary>
        /// Gets total document's score : BM25F + Term Proximity.
        /// </summary>
        /// <param name="aSearchTerm">query terms that appear in the document</param>
        /// <param name="aMaximalDistanceBetweenTerms">for each adjacent term pair, the maximal allowed distance (0 = no limit)</param>
        /// <param name="Document">document being scored</param>
        /// <param name="Catalog">catalog the document belongs to</param>
        /// <param name="aNumOfDocumentsContainingTerm">for each search term, the number of documents containing it</param>
        /// <returns>document score, -1 if document is irrelevant (no occurrences, or terms are too far apart)</returns>
        private double GetDocumentScore(string[] aSearchTerm, int[] aMaximalDistanceBetweenTerms, Document Document, Catalog Catalog,
            int[] aNumOfDocumentsContainingTerm)
        {
            double Score = 0;

            //Set field boosts.
            SetFieldBoosts(ref Document);

            //BM25F part: one saturated, IDF-weighted contribution per search term.
            for (int i = 0; i < aSearchTerm.Length; i++)
            {
                //BUG FIX: the occurrence counts and the field-weight accumulator are
                //per-term quantities. They were previously declared outside this loop and
                //never reset, so every term's score also included all previous terms'
                //occurrences and weights.
                int[] aTermOccurances = new int[Document.NumberOfFields];
                double Weight = 0;

                //Count this term's occurrences per field.
                List<WordPos> WordPosList = Catalog.GetDocumentListPerWord(aSearchTerm[i])[Document.Id] as List<WordPos>;
                if (WordPosList != null)
                {
                    foreach (WordPos wp in WordPosList)
                        aTermOccurances[(int)wp.Section]++;
                }

                for (int j = 0; j < Document.NumberOfFields; j++)
                {
                    //Weight sigma: sum the length-normalized, boosted term frequency over the fields.
                    if (Catalog.SumOfFieldLength[j] > 0 && Document.Fields[j].Length > 0)
                        Weight += ((float)aTermOccurances[j] / (float)Document.Fields[j].Length * Document.Fields[j].Boost) /
                            ((1 - Bc) + (Bc * (float)Document.Fields[j].Length / ((float)Catalog.SumOfFieldLength[j]
                            / (float)Catalog.NumberOfDocumentsIndexed)));
                }

                //IDF * saturated weight for this term.
                double idf = (float)Math.Log((float)(Catalog.NumberOfDocumentsIndexed) /
                   (float)(aNumOfDocumentsContainingTerm[i]));
                Score += (idf * Weight) / (K1 + Weight);
            }
            //Word doesn't appear in desired form in the document.
            if (Score == 0) return -1;
            //If there is only one search term, don't need proximity.
            if (aSearchTerm.Length <= 1) return Score;

            //Term-proximity part: sum the proximity score of every field that has adjacent
            //query terms within the required distance; -1 per field means "not relevant".
            bool IsDocumentRelevant = false;
            for (int j = 0; j < Document.NumberOfFields; j++)
            {
                double ProximityScore = GetProximityScorePerField(aSearchTerm, aMaximalDistanceBetweenTerms, Document, Catalog, aNumOfDocumentsContainingTerm,
                     (Document.Section)j, Catalog.SumOfFieldLength[j] / Catalog.NumberOfDocumentsIndexed,
                     Document.Fields[j].Length);
                if (ProximityScore != -1.0f) //No terms within required distance in this field.
                {
                    Score += ProximityScore;
                    IsDocumentRelevant = true;
                }
            }

            if (!IsDocumentRelevant) return -1;
            //Score is : BM25F + Sigma(Proximity scoring for each field)
            return Score;
        }

        //K for a field (for proximity score calculation):
        //K1 * ((1 - Bc) + Bc * FieldLength / averageFieldLength) — BM25-style length normalization.
        private double Kfield(int FieldLength, double averageFieldLength)
        {
            return K1 * (1 - Bc + (Bc * FieldLength / averageFieldLength));
        }

        //Wt for a field (for proximity calc): plain IDF, log(N / df).
        //NOTE(review): df == 0 would yield +Infinity; callers appear to pass only terms
        //that occur in at least one document — verify.
        private double wt(int NumOfIndexedDocuments, int NumOfDocumentsContainingWord)
        {
            return Math.Log((double)NumOfIndexedDocuments / (double)NumOfDocumentsContainingWord);
        }

        /// <summary>
        /// Gets hash of [term position, term index in term array] for the search terms in the document in a specific field.
        /// </summary>
        /// <param name="aSearchTerm">search terms to locate</param>
        /// <param name="Catalog">catalog holding the posting lists</param>
        /// <param name="Document">document whose term positions are collected</param>
        /// <param name="Section">Field in the document</param>
        /// <returns>entries of [position (Key), term index (Value)] sorted by position</returns>
        private DictionaryEntry[] GetTermPositionTermIndexListPerField(string[] aSearchTerm, Catalog Catalog, Document Document, Document.Section Section)
        {
             SortedList WordPositionTermList = new SortedList();
            for (int i = 0; i < aSearchTerm.Length; i++) //For each search term get all wordpos's into SortedList
            {
                foreach (WordPos w in Catalog.GetDocumentListPerWord(aSearchTerm[i])[Document.Id] as List<WordPos>)
                {
                    //Keep only positions in the requested field; a position already claimed by an
                    //earlier term stays with that term (first term wins).
                    if (w.Section == Section && (!WordPositionTermList.Contains(w.Position))) 
                        WordPositionTermList.Add(w.Position, i);
                }
            }
            //Now we have a sorted list of wordpos for the document, each one knows which word it belongs to.

            //Copy the sorted list out as an array of [position, term index] entries.
            DictionaryEntry[] WordPositionTermArray = new DictionaryEntry[WordPositionTermList.Count]; //Acc[i]
            WordPositionTermList.CopyTo(WordPositionTermArray, 0);
            return WordPositionTermArray;
        }

        /// <summary>
        /// checks if there are ANY adjacent terms within maximal distance as defined by aMaximalDistanceBetweenTerms.
        /// </summary>
        /// <param name="aMaximalDistanceBetweenTerms">for each adjacent query-term pair, the maximal allowed distance (0 = no limit)</param>
        /// <param name="aTermPositionTermIndex">field positions (Key) paired with query-term index (Value), sorted by position</param>
        /// <returns>true if there are at least 2 terms within maximal distance.</returns>
        private bool AreTermsWithinMaximumDistance(int[] aMaximalDistanceBetweenTerms, DictionaryEntry[] aTermPositionTermIndex)
        {
            for (int i = 0; i < aTermPositionTermIndex.Length - 1; i++)
            {
                //Case 1: this occurrence is query term (k) and the next occurrence is query term (k-1).
                if ((int)(aTermPositionTermIndex[i].Value) == (int)(aTermPositionTermIndex[i + 1].Value) + 1)
                {
                    //No maximal distance defined for this pair: any distance qualifies.
                    if (aMaximalDistanceBetweenTerms[(int)(aTermPositionTermIndex[i].Value) - 1] == 0)
                        return true;
                    else
                    {
                        //If terms are closer than defined distance
                        if (Math.Abs((int)aTermPositionTermIndex[i].Key - (int)aTermPositionTermIndex[i + 1].Key) <=
                        aMaximalDistanceBetweenTerms[(int)(aTermPositionTermIndex[i].Value) - 1])
                            return true;
                        //else i++; //Jump to next Sequence of terms. (if we are evaluating term1 and term2, 
                        //then after we don't want to evaluate term2 and the next instance of term1 (they are not related)
                    }
                }
                else
                //Case 2: this occurrence is query term (k) and the next occurrence is query term (k+1).
                if ((int)(aTermPositionTermIndex[i].Value) == (int)(aTermPositionTermIndex[i + 1].Value) - 1)
                {
                    //No maximal distance defined for this pair: any distance qualifies.
                    if (aMaximalDistanceBetweenTerms[(int)(aTermPositionTermIndex[i].Value)] == 0)
                        return true;
                    else
                    {
                        //If terms are closer than defined distance
                        if (Math.Abs((int)aTermPositionTermIndex[i].Key - (int)aTermPositionTermIndex[i + 1].Key) <=
                    aMaximalDistanceBetweenTerms[(int)(aTermPositionTermIndex[i].Value)])
                            return true;
                        //else i++;
                    }
                }
            }
            //No adjacent query terms were found within any required distance.
            return false;
        }

        /// <summary>
        /// Gets Proximity Score for field for the terms. 
        /// </summary>
        /// <param name="aSearchTerm">search terms</param>
        /// <param name="aMaximalDistanceBetweenTerms">for each adjacent term pair, the maximum allowed distance (0 = no limit)</param>
        /// <param name="Document">document being scored</param>
        /// <param name="Catalog">catalog the document belongs to</param>
        /// <param name="aNumOfDocumentsContainingTerm">for each term i, the number of docs containing it</param>
        /// <param name="Section">Field</param>
        /// <param name="AvgFieldLength">Average length of field in all indexed documents</param>
        /// <param name="FieldLength">Length of field in Document</param>
        /// <returns>proximity score for the document field, or -1 if field isn't relevant (the minimal distance between terms isn't met, terms don't appear
        /// in this field or this field is empty.)</returns>
        private double GetProximityScorePerField(string[] aSearchTerm, int[] aMaximalDistanceBetweenTerms, Document Document, Catalog Catalog, 
            int[] aNumOfDocumentsContainingTerm, Document.Section Section, double AvgFieldLength, int FieldLength)
        {
            //No documents with this field, or no proximity required.
            if (AvgFieldLength == 0 || aSearchTerm.Length < 2) return -1;

            double[] aAccumilator = new double[aSearchTerm.Length];

            //Sorted positions of every search-term occurrence in this field,
            //each paired with the index of the term it belongs to.
            DictionaryEntry[] aTermPositionTermIndex = GetTermPositionTermIndexListPerField(aSearchTerm, Catalog, Document, Section);

            //If there are less than 2 word positions, this field is not relevant.
            if (aTermPositionTermIndex.Length < 2)
                return -1;

            //If there aren't ANY adjacent terms within required distance, this field is not relevant.
            if (!AreTermsWithinMaximumDistance(aMaximalDistanceBetweenTerms, aTermPositionTermIndex))
                return -1;

            for (int i = 0; i < aTermPositionTermIndex.Length - 1; i++) //for each word position ("posting") in word position list
            {
                int termIndex = (int)aTermPositionTermIndex[i].Value;
                int nextTermIndex = (int)aTermPositionTermIndex[i + 1].Value;
                if (termIndex == nextTermIndex + 1 || termIndex == nextTermIndex - 1) //if words are sequential in the query,
                {
                    //BUG FIX: the original computed '(dist) ^ (-2)', but '^' is bitwise XOR
                    //in C#, not exponentiation. The intended factor (per the formula in the
                    //comments below) is 1 / dist^2.
                    int dist = (int)aTermPositionTermIndex[i].Key - (int)aTermPositionTermIndex[i + 1].Key;
                    double invSquaredDistance = 1.0 / ((double)dist * dist);

                    //Acc(Ti) = Acc(Ti) + Wtj * (1/dist(Ti,Tj)^2)
                    aAccumilator[termIndex] +=
                        wt(Catalog.NumberOfDocumentsIndexed, aNumOfDocumentsContainingTerm[nextTermIndex]) * invSquaredDistance;
                    //Acc(Tj) = Acc(Tj) + Wti * (1/dist(Ti,Tj)^2)
                    aAccumilator[nextTermIndex] +=
                        wt(Catalog.NumberOfDocumentsIndexed, aNumOfDocumentsContainingTerm[termIndex]) * invSquaredDistance;
                }
            }

            //Total proximity scoring: sigma over the per-term accumulators.
            double Score = 0;
            for (int i = 0; i < aAccumilator.Length; i++) //per term
            {
                Score += CalculatePerFieldProximitySigma(FieldLength,
                    AvgFieldLength, wt(Catalog.NumberOfDocumentsIndexed,
                    aNumOfDocumentsContainingTerm[i]), aAccumilator[i]);
            }

            return Score;
        }

        //Proximity sigma for one term in one field:
        //min(wt, 1) * ACC * (K1 + 1) / (ACC + Kfield(fieldLength, avgFieldLength)).
        private double CalculatePerFieldProximitySigma(int FieldLength, double AverageFieldLength, double wt, double ACCi)
        {
            double cappedTermWeight = Math.Min(wt, 1.0);
            double saturation = ACCi + Kfield(FieldLength, AverageFieldLength);
            return cappedTermWeight * (ACCi * (K1 + 1) / saturation);
        }

        /// <summary>
        /// Orders results by site priority, then date, then hour (all descending), then
        /// document type (ascending), and finally by relevance score (descending).
        /// </summary>
        List<Document> SortResultsByUrgencyDateTypeRank(List<Document> Documents)
        {
            return Documents
                .OrderByDescending(d => d.SitePriority)
                .ThenByDescending(d => d.DateTime.Date)
                .ThenByDescending(d => d.DateTime.Hour)
                .ThenBy(d => d.DocType)
                .ThenByDescending(d => d.Score)
                .ToList();
        }

        /// <summary>
        /// Copies the scored result list and stamps each document with the priority of its
        /// site wherever a matching DomainPriority entry exists (a later entry for the same
        /// site overrides an earlier one).
        /// </summary>
        private List<Document> GetPrioritizedDocuments(List<Document> ScoredDocuments, List<DomainPriority> DomainPriorities)
        {
            List<Document> PrioritizedResults = new List<Document>(ScoredDocuments);
            foreach (Document result in PrioritizedResults)
            {
                foreach (DomainPriority priority in DomainPriorities)
                {
                    if (result.SiteName == priority.SiteName)
                        result.SitePriority = priority.Priority;
                }
            }
            return PrioritizedResults;
        }
       
		/// <summary>
		/// Parses the query and returns hashtable of document id as Key and array of SearchTerm as Value
		/// </summary>
		/// <param name="query">query to parse</param>
		/// <param name="catalog">catalog to work with</param>
		/// <param name="distanceQueryList">out parameter, DistanceQuery (List)</param>
		/// <returns>Hashtable of document id (long) -> string[] of the query terms found in that document</returns>
		private Hashtable Search(string query, Catalog catalog, out List<DistanceQuery> distanceQueryList)
		{
			Hashtable mainTable = new Hashtable();

			//ParseSearch returns { andList, orList, notList, distanceQueryList }.
			var res = QueryParser.ParseSearch(query) as object[];
			List<string> andList = res[0] as List<string>;
			List<string> orList = res[1] as List<string>;
			List<string> notList = res[2] as List<string>;
			distanceQueryList = res[3] as List<DistanceQuery>;

			//Collect every word mentioned anywhere in the query so all posting lists
			//can be fetched in one pass.
			List<string> allWordsList = new List<string>();
			allWordsList.AddRange(notList);
			allWordsList.AddRange(orList);
			allWordsList.AddRange(andList);
			foreach (DistanceQuery distanceQuery in distanceQueryList)
			{
				allWordsList.Add(distanceQuery.Word1);
				allWordsList.Add(distanceQuery.Word2);
			}

			Hashtable wordDocIdHash = new Hashtable(); // word -> List<long> of document ids
			CreateWordDocIdHash(ref wordDocIdHash, allWordsList, catalog);

			//NOTE(review): the word lists are parsed AFTER the hash was built from the raw
			//words; this assumes ParseWord output matches the keys CreateWordDocIdHash used — verify.
			for (int i = 0; i < notList.Count; ++i)
				notList[i] = QueryParser.ParseWord(notList[i], true);
			for (int i = 0; i < orList.Count; ++i)
				orList[i] = QueryParser.ParseWord(orList[i], true);
			for (int i = 0; i < andList.Count; ++i)
				andList[i] = QueryParser.ParseWord(andList[i], true);

			//Place all doc ids of each word in orList in the main hashtable.
			foreach (string word in orList)
			{
				var term = word;
				var wordDocList = wordDocIdHash[word] as List<long>;
				if (wordDocList == null) continue;
				foreach (long docId in wordDocList)
				{
					var termsList = mainTable[docId] as List<string>;
					if (termsList == null) termsList = new List<string>();
					if (termsList.Contains(term)) continue;
					termsList.Add(term);
					mainTable[docId] = termsList;
				}
			}

			//Place all documents that contain ALL words from andList in the hashtable.
			EnforceAnd(ref mainTable, wordDocIdHash, andList);

			//Place all documents that contain both search-term words in each DistanceQuery.
			foreach (DistanceQuery distanceQuery in distanceQueryList)
			{
				string[] wordsArray = new string[] { distanceQuery.Word1, distanceQuery.Word2 };
				EnforceAnd(ref mainTable, wordDocIdHash, new List<string>(wordsArray));
			}

			//Remove all documents that contain excluded (NOT) words.
			foreach (string word in notList)
			{
				var list = wordDocIdHash[word] as List<long>;
				//BUG FIX: a NOT word with no postings left 'list' null and crashed the loop;
				//the OR loop above already guarded against this case.
				if (list == null) continue;
				foreach (long docId in list)
				{
					if (mainTable.Contains(docId)) mainTable.Remove(docId);
				}
			}

			//Convert each List<string> to string[] for the callers.
			Hashtable returnTable = new Hashtable();
			foreach (long id in mainTable.Keys)
			{
				var list = mainTable[id] as List<string>;
				returnTable[id] = list.ToArray();
			}

			return returnTable;
		}

		/// <summary>
		/// Adds to mainTable every document id that contains ALL of the words in wordList,
		/// tagging each such document with those words as matched search terms.
		/// </summary>
		/// <param name="mainTable">hashtable of document id (long) -> List&lt;string&gt; of matched terms</param>
		/// <param name="wordDocIdHash">hashtable of word (string) -> List&lt;long&gt; of document ids</param>
		/// <param name="wordList">words that must all appear in a document</param>
		private void EnforceAnd(ref Hashtable mainTable, Hashtable wordDocIdHash, List<string> wordList)
		{
			//Collect the posting list of every word (a null list makes PerformAnd return empty).
			List<List<long>> postingLists = new List<List<long>>();
			foreach (string word in wordList)
				postingLists.Add(wordDocIdHash[word] as List<long>);

			//PerformAnd yields the doc ids that contain every word in the AND list.
			foreach (long docId in PerformAnd(postingLists))
			{
				List<string> matchedTerms = mainTable[docId] as List<string>;
				if (matchedTerms == null)
					matchedTerms = new List<string>();

				foreach (string word in wordList)
				{
					if (!matchedTerms.Contains(word))
						matchedTerms.Add(word);
				}

				mainTable[docId] = matchedTerms;
			}
		}

		/// <summary>
		/// Intersects the given posting lists: returns every id from the first list that
		/// appears in ALL lists (preserving the first list's order and duplicates).
		/// An empty input, or any null/empty list, yields an empty result.
		/// </summary>
		private List<long> PerformAnd(List<List<long>> lists)
		{
			List<long> ret = new List<long>();

			if (lists.Count == 0) return ret;
			foreach (List<long> list in lists)
				if (null == list || list.Count == 0) return ret;

			//Build hash sets once so membership checks are O(1) instead of O(n)
			//(the original List.Contains scan made the intersection quadratic).
			List<HashSet<long>> sets = new List<HashSet<long>>();
			for (int i = 1; i < lists.Count; i++)
				sets.Add(new HashSet<long>(lists[i]));

			foreach (long id in lists[0])
			{
				bool inAll = true;
				foreach (HashSet<long> set in sets)
				{
					if (!set.Contains(id)) { inAll = false; break; }
				}
				if (inAll) ret.Add(id);
			}
			return ret;
		}

		/// <summary>
		/// Fills table with word -> List&lt;long&gt; of the ids of documents containing that word.
		/// Words wrapped in the exact-match marker are looked up verbatim (marker stripped).
		/// </summary>
		/// <param name="table">Hashtable of word (string) -> List&lt;long&gt; (document ids), updated in place</param>
		/// <param name="words">list of words to look up (duplicates are processed once)</param>
		/// <param name="cat">catalog file in which to look</param>
		/// <param name="exact">ignored: the flag is recomputed per word from the exact-match
		/// markers (kept only for signature compatibility)</param>
		private void CreateWordDocIdHash(ref Hashtable table, List<string> words, Catalog cat, bool exact)
		{
			//Distinct() replaces the old O(n^2) List.Contains de-duplication and keeps
			//first-occurrence order.
			foreach (string word in words.Distinct())
			{
				//A word wrapped in the exact-match marker is looked up verbatim.
				//The length guard prevents Substring(1, -1) when the word IS the marker itself.
				string lookupWord = word;
				bool exactLookup = false;
				if (word.Length >= 2 && word.StartsWith(QueryParser.ExactMatchString) && word.EndsWith(QueryParser.ExactMatchString))
				{
					exactLookup = true;
					lookupWord = word.Substring(1, word.Length - 2);
				}

				var res = cat.GetDocumentListPerWord(lookupWord, exactLookup);
				if (res.Count == 0) continue;

				//Note: the table is keyed by the stripped word, not the original marked word.
				List<long> docIds = table[lookupWord] as List<long>;
				if (docIds == null) docIds = new List<long>();
				foreach (long d in res.Keys)
				{
					if (!docIds.Contains(d)) docIds.Add(d);
				}
				table[lookupWord] = docIds;
			}
		}
		/// <summary>
		/// Convenience overload: builds the word -> document-id table with exact matching off.
		/// </summary>
		private void CreateWordDocIdHash(ref Hashtable table, List<string> words, Catalog cat)
		{
			CreateWordDocIdHash(ref table, words, cat, false);
		}      
   

        /// <summary>
        /// Main Search Function: scores all documents matching Query in Catalog and
        /// returns them sorted by urgency/date/type/score.
        /// </summary>
        /// <param name="Query">raw user query</param>
        /// <param name="Catalog">catalog to search</param>
        /// <returns>matching documents, best first</returns>
        public List<Document> DoSearch(string Query, Catalog Catalog)
        {
            //(The old code allocated an empty list that was immediately overwritten;
            //domain-priority re-ranking remains disabled, as before.)
            List<Document> results = GetMatchingScoredDocuments(Query, Catalog);
            results = SortResultsByUrgencyDateTypeRank(results);
            return results;
        }

        /// <summary>
        /// Legacy search variant returning only matching document ids (no term tracking):
        /// documents containing all AND words or any OR word, minus documents containing
        /// any NOT word.
        /// </summary>
        /// <param name="wordDocIdHash">Hashtable of Word -> List&lt;long&gt; of document ids</param>
        /// <param name="query">query to parse</param>
        /// <param name="cat">catalog to work with (currently unused in this method)</param>
        /// <param name="dQueries">out parameter: distance queries parsed from the query (not yet enforced)</param>
        /// <returns>ids of the matching documents</returns>
        private List<long> Search(Hashtable wordDocIdHash, string query, Catalog cat, out List<DistanceQuery> dQueries)
        {
            var res = QueryParser.ParseSearch(query) as object[];
            List<string> andList = res[0] as List<string>;
            List<string> orList = res[1] as List<string>;
            List<string> notList = res[2] as List<string>;
            dQueries = res[3] as List<DistanceQuery>;

            List<long> docNot = new List<long>();
            List<long> docOr = new List<long>();

            // Put all documents with words in the NOT list on one list
            // (and drop those words from the AND / OR lists first).
            foreach (string s in notList)
            {
                if (andList.Contains(s)) andList.Remove(s);
                if (orList.Contains(s)) orList.Remove(s);

                var wordDocs = wordDocIdHash[new Word(s)] as List<long>;
                // BUG FIX: a word with no postings yielded null and crashed AddRange.
                if (wordDocs != null) docNot.AddRange(wordDocs);
            }
            // The same for the OR list.
            foreach (string s in orList)
            {
                var wordDocs = wordDocIdHash[new Word(s)] as List<long>;
                if (wordDocs != null) docOr.AddRange(wordDocs);
            }

            // Count how many AND-word posting lists each document appears on.
            Hashtable ht = new Hashtable(); // doc id (long) -> IntWrapper occurrence count

            foreach (string s in andList)
            {
                var andDocs = wordDocIdHash[new Word(s)] as List<long>;
                if (andDocs == null) continue;
                foreach (long id in andDocs)
                {
                    IntWrapper dCount = ht[id] as IntWrapper;
                    if (dCount == null)
                    {
                        // BUG FIX: the first sighting must count as 1, not 0 — otherwise a
                        // document containing all N AND words ends with count N-1 and is
                        // always discarded below.
                        ht.Add(id, new IntWrapper(1));
                    }
                    else
                    {
                        dCount++;
                        ht[id] = dCount;
                    }
                }
            }

            // Keep only documents that appeared for every AND word.
            List<long> toRemove = new List<long>();
            foreach (long i in ht.Keys)
            {
                if ((ht[i] as IntWrapper).Value != andList.Count) toRemove.Add(i);
            }
            // BUG FIX: the old loop iterated 'foreach (int i ...)', boxing an int key that
            // never matched the Hashtable's long keys, so Remove silently did nothing.
            foreach (long i in toRemove) ht.Remove(i);

            // ht now contains all documents that contain all the words in the AND list;
            // remove from it the docs from the NOT list.
            foreach (long id in docNot)
            {
                if (ht.ContainsKey(id)) ht.Remove(id);
            }

            // BUG FIX: Hashtable.Keys is not a List<long>; the old 'as' cast produced null
            // and List's copy constructor threw. Copy the keys out explicitly instead.
            List<long> finalDocList = new List<long>();
            foreach (long id in ht.Keys) finalDocList.Add(id);

            // Add all documents from the OR list.
            foreach (long id in docOr)
            {
                if (!finalDocList.Contains(id)) finalDocList.Add(id);
            }

            // TODO: dQueries (distance constraints) are parsed but not enforced here.
            return finalDocList;
        }
    }
	/// <summary>
	/// Mutable integer holder, so occurrence counts stored in a Hashtable can be
	/// updated in place.
	/// </summary>
	class IntWrapper
	{
		public int Value { get; set; }

		public IntWrapper(int v)
		{
			Value = v;
		}

		/// <summary>Increments the wrapped value and returns the same instance.</summary>
		public static IntWrapper operator ++(IntWrapper i)
		{
			i.Value++;
			return i;
		}
	}
}


