﻿package lucene.search
{
	import flash.filesystem.File;
	import flash.filesystem.FileMode;
	
	import lucene.document.Document;
	import lucene.document.Field;
	import lucene.file.FileHandler;
	import lucene.index.FieldInfo;
	import lucene.index.SegmentInfo;
	import lucene.index.Term;
	import lucene.query.TermQuery;
	
	/**
	 * Read-only reader over a Lucene file-based index: resolves the current
	 * generation, parses the segments_N descriptor and exposes document /
	 * term-level lookups across all segments.
	 *
	 * @author FreakDev
	 */
	public class IndexReader implements IReader
	{
		
		/** Directory containing the index files. */
		private var _directory:File;
		/** Format version read from the segments_N header. */
		private var _indexFormat:int;
		/** Index generation, read from segments.gen. */
		private var _generation:Number;
		/** The segments_N file for the current generation. */
		private var _segment:File;
		/** Counter the index writer uses to name new segments (read, not used here). */
		private var _segNameCounter:int;
		/** Number of segments listed in the segments_N file. */
		private var _segCount:Number;
		
		// Associative array of SegmentInfo instances, keyed by segment name.
		private var _segmentsInfos:Array;
		// Associative arrays are not iterated in the order they are populated;
		// this plain array records the segment names in the order they were
		// read from the segments file. Every method whose offset arithmetic
		// depends on segment order MUST iterate through it.
		private var _segInfoDic:Array;
		
		// Total number of documents across all segments.
		private var _docCount:int;
		
		/**
		 * Opens the Lucene index stored in the given directory and reads its
		 * segment descriptors.
		 *
		 * @param directory directory containing the index files
		 *                  (segments.gen, segments_N, per-segment files)
		 */
		public function IndexReader(directory:File) 
		{
			if (directory.isDirectory)
				this._directory = directory;
				
			this._generation = this._getGeneration();
			this._readSegment();
		}
		
		/**
		 * Returns the similarity implementation used for scoring.
		 */
		public function getSimilarity():Similarity
		{
			return Similarity.getDefault();
		}
		
		/**
		 * Reads the current index generation from the segments.gen file.
		 * The generation is stored twice; both copies must agree.
		 *
		 * @return the generation number
		 * @throws Error when the two recorded generations disagree
		 */
		private function _getGeneration():Number
		{
			var segFile:File = this._directory.resolvePath('segments.gen');
			var reader:FileHandler = new FileHandler(segFile, FileMode.READ);
			
			// Skip the 4-byte header (presumably a format marker — TODO confirm
			// against the writer / Lucene file-format spec).
			reader.readUInt32();
			
			var gen1:Number = reader.readUInt64();
			var gen2:Number = reader.readUInt64();
			
			if (gen1 != gen2) {
				throw new Error('Invalid index : invalid generation');
			}
			
			return gen1;
		}
		
		/**
		 * Returns the total number of documents in the index.
		 */
		public function count():int
		{
			return this._docCount;
		}		
		
		/**
		 * Returns the number of documents containing the given term,
		 * summed over all segments (order-independent, so plain iteration
		 * over the associative array is fine here).
		 *
		 * @param term the term to look up
		 */
		public function docFreq(term:Term):int
		{
		    var result:int = 0;
		    for each (var segInf:SegmentInfo in this._segmentsInfos) {
		        var termInfo:* = segInf.getTermInfo(term);
		        if (termInfo !== null) {
		            result += termInfo.docFreq;
		        }
		    }
		    return result;
		}		
		
		/**
		 * Returns the normalization factor for the given field of a document.
		 *
		 * @param docId     index-wide document id
		 * @param termField field name
		 * @return the norm, or NaN when docId is out of range
		 */
		public function norm (docId:int, termField:String):Number
		{
		    if (docId < 0 || docId >= this._docCount) {
		        // A Number cannot hold null; NaN is the explicit "no value".
		        return NaN;
		    }
		
		    // Walk segments in file order (via _segInfoDic — associative
		    // iteration order is unreliable, see the field comment) until the
		    // one owning docId is found; segmentStartId ends up being that
		    // segment's first index-wide doc id.
		    var segmentStartId:int = 0;
		    var segInfo:SegmentInfo = null;
		    for (var s:int = 0; s < this._segInfoDic.length; s++) {
		        segInfo = this._segmentsInfos[this._segInfoDic[s]];
		        if (segmentStartId + segInfo.count() > docId) {
		            break;
		        }
		
		        segmentStartId += segInfo.count();
		    }
		
			/**
			 * @todo manage deleted doc
			 */		
		    return segInfo.norm(docId - segmentStartId, termField);
		}
		
		/**
		 * Reads the segments_N file of the current generation, validates its
		 * format, and builds the SegmentInfo collection plus the ordered name
		 * list. Also accumulates the total document count.
		 *
		 * @throws Error on an unsupported format or when separate norm files
		 *         are encountered
		 */
		private function _readSegment():void
		{
			// The descriptor is named segments_N, N being the generation in base 36.
			var segFileName:String = 'segments_' + this._generation.toString(36);
			
			this._segment = this._directory.resolvePath(segFileName);
			
			var reader:FileHandler = new FileHandler();
			reader.open(this._segment, FileMode.READ);
			
			this._indexFormat = reader.readUInt32();
			if (this._indexFormat != SegmentInfo.FORMAT_SINGLE_NORM_FILE &&
				this._indexFormat != SegmentInfo.FORMAT_SHARED_DOC_STORE &&
				this._indexFormat != SegmentInfo.FORMAT_HAS_PROX) {
					
				throw new Error ('Unsupported segment version');
			}

			// Skip 8 bytes — presumably the 64-bit index version; TODO confirm
			// against the Lucene segments-file specification.
			reader.readUInt32();
			reader.readUInt32();
			
			this._segNameCounter = reader.readUInt32();
			
			this._segCount = reader.readUInt32();
			
			this._docCount = 0;
			
			this._segmentsInfos = new Array();
			this._segInfoDic = new Array();
			
			for (var i:Number = 0; i < this._segCount; i++ )
			{
				var segName:String = reader.readString();
				var segSize:int = reader.readUInt32();
				var delGen:Number = reader.readUInt64();
				
				// Doc-store fields only exist in 2.3+ formats.
				// NOTE(review): the <= comparisons rely on the format constants
				// being negative with newer formats smaller — verify in SegmentInfo.
				var docStoreOffset:int;
				var docStoreSegment:String;
				var docStoreIsCompound:int;
				if (this._indexFormat <= SegmentInfo.FORMAT_SHARED_DOC_STORE) {
					docStoreOffset = reader.readUInt32();
					// NOTE(review): if readUInt32 is truly unsigned this can never
					// equal -1 — confirm FileHandler's return range.
					if (docStoreOffset != -1) {
						docStoreSegment = reader.readString();
						docStoreIsCompound = reader.readByte();
					}
				}
				
				var hasSingleNormFile:Boolean = reader.readByte() != 0;
				
				var numField:int = reader.readUInt32();
				var normGen:Array = new Array();
				if ( -1 != numField) {
					for (var fieldCount:int = 0; fieldCount < numField; fieldCount++ ) {
						normGen.push(reader.readUInt64());
					}
					
					throw new Error('Separate norm files are not supported');
				}
				
				/**
				 * @todo manage other versions of the segments file
				 */
				
				var isCompoundFile:int = reader.readByte();
				
				// Deletion count and prox flag only exist in 2.4+ formats.
				var deletionCount:int;
				var hasProx:int;
				if (this._indexFormat <= SegmentInfo.FORMAT_HAS_PROX) {
					deletionCount = reader.readUInt32();
					hasProx = reader.readByte();
				}
				
				this._docCount += segSize;
				
				this._segmentsInfos[segName] = new SegmentInfo(this._directory,
																segName, 
																segSize, 
																delGen, 
																hasSingleNormFile,
																isCompoundFile);
																
				// Remember the on-disk order; associative iteration is unordered.
				this._segInfoDic.push(segName);
			}
		}
		
		/**
		 * Runs a single-term search and returns QueryHit objects sorted by
		 * descending score. Scores are scaled down so the best hit scores at
		 * most 1 when the raw top score exceeds 1.
		 *
		 * @param query the term text
		 * @param field the field to search in
		 * @return matching hits (possibly empty)
		 */
		public function find(query:String, field:String):Array
		{
			/**
			 * @todo parse query string
			 */
			// NOTE(review): argument order (query, field) follows this project's
			// Term constructor — confirm it is not accidentally swapped versus
			// Lucene's conventional Term(field, text).
			var q:TermQuery = new TermQuery(new Term(query,field));
			
			q.execute(this);
			
			var topScore:Number = 0;
			
			var hits:Array = new Array();
			for (var id:* in q.matchedDocs()) {
				var docScore:Number = q.score(id, this);
				if (docScore != 0) {
					var hit:QueryHit = new QueryHit(this);
					hit.id = id;
					hit.score = docScore;
					
					hits.push(hit);
					
					if (docScore > topScore) {
						topScore = docScore;
					}
				}
			}
			
			if (hits.length == 0) {
				return new Array();
			}
			
			// Normalize: scale every score by the top score when it exceeds 1.
			// 'hit' is reused here — AS3 vars are function-scoped and a second
			// declaration would be a duplicate-definition warning.
			if (topScore > 1) {
				for each (hit in hits) {
					hit.score /= topScore;
				}
			}
			
			hits.sortOn("score", Array.NUMERIC | Array.DESCENDING);
			
			return hits;
		}
		
		/**
		 * Returns the index-wide ids of every document containing the term,
		 * walking each segment's .frq postings in on-disk segment order.
		 *
		 * @param term the term to look up
		 */
		public function termDocs(term:lucene.index.Term):Array
		{
			var result:Array = [];
			var segmentStartDocId:Number = 0;
			
			// Iterate via _segInfoDic so segments are visited in file order —
			// the id offsets below depend on it.
			for(var countSegInfo:int=0; countSegInfo<this._segInfoDic.length; countSegInfo++) {
				var si:SegmentInfo = this._segmentsInfos[this._segInfoDic[countSegInfo]];
				var termInfo:* = si.getTermInfo(term);
				
				if (null == termInfo) {
					segmentStartDocId += si.count();
					continue;
				}
				
				var frqFile:FileHandler = si.getCompoundFile('.frq');
				frqFile.seek(termInfo.freqPointer, true);
				
				// Postings are delta-encoded doc ids. The low bit of each delta
				// flags whether a frequency VInt follows: odd means freq == 1
				// (no extra VInt), even means a frequency VInt is next.
				var docId:int = 0;
				for (var count:int=0; count<termInfo.docFreq; count++) {
					var docDelta:int = frqFile.readVInt();
					if (docDelta % 2 == 1) {
						docId += (docDelta-1)/2;
					}
					else {
						docId += docDelta/2;
						// Skip the frequency; only doc ids are collected here.
						frqFile.readVInt();
					}
					
					result.push((segmentStartDocId + docId));
				}
				
				segmentStartDocId += si.count();
				
			}
			
			return result;
		}
		
		/**
		 * Returns the distinct fields present in the index as
		 * {label, value} objects (field name, field number).
		 *
		 * NOTE(review): the method name keeps the historical "Feilds" typo
		 * because external callers depend on it.
		 */
		public function getFeildsList():Array
		{
			var fieldsList:Array = new Array();
			var seenNames:Array = new Array();
			for each (var seg:* in this._segmentsInfos) {
				for each (var fieldInfo:* in seg.getFieldsList()) {
					var name:String = fieldInfo.fieldName;
					if (seenNames.indexOf(name) == -1) {
						seenNames.push(name);
						fieldsList.push({label:name, value:fieldInfo.number});
					}
				}
			}
			
			return fieldsList;
		}
		
		/**
		 * Loads a stored document by its index-wide id, reading the field
		 * index (.fdx) and field data (.fdt) files of the owning segment.
		 *
		 * @param id index-wide document id
		 * @return the reconstructed Document
		 * @throws Error when id is out of range
		 */
		public function getDocument (id:int):Document 
		{
			if (id < 0 || id >= this._docCount) {
				throw new Error('Document out of range');
			}
			
			// Locate the owning segment in file order (via _segInfoDic —
			// associative iteration order is unreliable for offset arithmetic).
			var segStartDocId:int = 0;
			var segInfo:SegmentInfo = null;
			for (var s:int = 0; s < this._segInfoDic.length; s++) {
				var si:SegmentInfo = this._segmentsInfos[this._segInfoDic[s]];
				if (segStartDocId + si.count() > id) {
					segInfo = si;
					break;
				}
				
				segStartDocId += si.count();
			}
			
			// Each .fdx entry is a fixed-width 8-byte pointer into the .fdt file.
			var fdxFile:FileHandler = segInfo.getCompoundFile('.fdx');
			fdxFile.seek((id - segStartDocId) * 8, true);
			
			var fieldValuePosition:Number = fdxFile.readUInt64();
			var fdtFile:FileHandler = segInfo.getCompoundFile('.fdt');
			fdtFile.seek(fieldValuePosition, true);
			var fieldCount:Number = fdtFile.readVInt();
			
			var doc:Document = new Document();
			for (var c:Number=0; c<fieldCount; c++) {
				var fieldNum:Number = fdtFile.readVInt();
				// bits: 0x1 presumably "tokenized", 0x2 "binary" — TODO confirm
				// against the index writer.
				var bits:int = fdtFile.readByte();
				
				var fieldInfo:* = segInfo.getField(fieldNum);
				
				var field:Field = null;
				if (!(bits & 2)) {
					field = new Field(fieldInfo.fieldName, fdtFile.readString(), true, fieldInfo.isTokenized, bits & 1, false);
				}
				else {
					/**
					 * @todo manage binary field
					 */
				}
				
				// The binary branch above is unimplemented and leaves 'field'
				// null; never add a null field to the document.
				if (field != null) {
					doc.addField(field);
				}
			}
			
			return doc;
		}
		
		/**
		 * Returns per-document term frequencies for the given term, keyed by
		 * index-wide document id.
		 *
		 * @param term the term to look up
		 */
		public function termFreqs(term:Term):Array
		{
			var result:Array = new Array();
			var segStartDocId:int = 0;
			// Iterate in file order so segStartDocId offsets match the ids
			// produced elsewhere (termDocs, getDocument).
			for (var s:int = 0; s < this._segInfoDic.length; s++) {
				var si:SegmentInfo = this._segmentsInfos[this._segInfoDic[s]];
				var atf:Array = si.termFreqs(term, segStartDocId);
				for (var i:* in atf) {
					// First writer wins; ids should not collide across segments
					// since each batch is offset by segStartDocId.
					if (result[i] == undefined) {
						result[i] = atf[i];
					}
				}
				segStartDocId += si.count();
			}
			
			return result;
		}
		
	}
	
}