/**
 * 
 */
package edu.umd.clip.lm.storage.compact;

import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.GZIPOutputStream;

import com.sleepycat.util.PackedInteger;

import edu.umd.clip.lm.model.*;
import edu.umd.clip.lm.util.*;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
/**
 * Compact on-disk storage of per-word probability trees, keyed by cluster id.
 * An instance is either in read mode (constructed from a DataInput, serving
 * getProbTree()) or in write mode (constructed with a temp directory,
 * accepting setProbTree() calls and later merged via compactify()).
 */
public class WordFile {
	// Shared model metadata: compact factor mapping and per-cluster counts.
	private final DbDescriptor descriptor;
	// Read mode: cluster id -> packed (length<<32 | offset) into data; see Ptr.
	private CompactReadOnlyInt2LongHashMap clusterPointers;
	// Read mode: concatenated serialized probability-tree records.
	private byte data[];
	
	// Non-null only in write mode; also serves as the lock for writes.
	private final WritingModeData writingData;
	
	// Global deduplication statistics accumulated across all compactify() calls.
	private static final AtomicLong totalProbTrees = new AtomicLong();
	private static final AtomicLong duplicateProbTrees = new AtomicLong();
	private static final AtomicLong totalSpaceSaved = new AtomicLong();
	
	/** Pair of temp-file-backed streams used while in write mode. */
	private static class WritingModeData {
		DetachableOutputStream indexOutput;
		DetachableOutputStream dataOutput;
	}
	
	/**
	 * Packed pointer into the data buffer: the high 32 bits hold the record
	 * length, the low 32 bits the byte offset. INVALID (-1) is the sentinel
	 * the pointer map returns for a missing cluster id.
	 */
	private static class Ptr {
		int offset;
		int length;
		/** Sentinel for "no entry"; cannot collide with a real pointer because a real length is non-negative. */
		public static final long INVALID = -1;

		/**
		 * @param offset byte offset of the record in the data buffer
		 * @param length record length in bytes
		 */
		public Ptr(int offset, int length) {
			this.offset = offset;
			this.length = length;
		}

		/** Decodes a packed pointer; returns null for the INVALID sentinel. */
		public static Ptr fromLong(long val) {
			if (val == INVALID) return null;
			int length = (int) (val >>> 32);
			// BUG FIX: the int literal 0xFFFFFFFF sign-extends to all ones, so
			// the old mask was a no-op that only worked via the (int) cast.
			// Use a long mask so the low word is selected explicitly.
			int offset = (int) (val & 0xFFFFFFFFL);
			return new Ptr(offset, length);
		}

		/** Encodes this pointer as a long (length in the high word). */
		public long toLong() {
			return toLong(offset, length);
		}

		public static long toLong(int offset, int length) {
			// Mask the offset so a (pathological) negative offset cannot
			// sign-extend into and corrupt the length bits.
			return (long) length << 32 | (offset & 0xFFFFFFFFL);
		}
	}
	
	/**
	 * Opens a word file in read-only mode, loading the index and data
	 * sections previously written by compactify().
	 *
	 * @param descriptor shared model metadata
	 * @param in input positioned at the start of this word file's section
	 * @throws IOException on read failure
	 */
	public WordFile(DbDescriptor descriptor, DataInput in) throws IOException {
		this.descriptor = descriptor;
		this.writingData = null; // read-only: no writing state is ever created
		read(in);
	}
	
	/**
	 * Opens a word file in write mode. Two temp-file-backed streams (index and
	 * data) are created in tmpDir, each budgeted half of maxBufferSize.
	 *
	 * @param descriptor shared model metadata
	 * @param tmpDir directory for the temporary index/data files
	 * @param maxBufferSize total in-memory buffer budget for both streams
	 * @param fileCreationListener notified when a stream spills to its backing file
	 * @throws IOException if a temp file cannot be created
	 */
	public WordFile(DbDescriptor descriptor, File tmpDir, int maxBufferSize, Observer fileCreationListener) throws IOException {
		this.descriptor = descriptor;
		writingData = new WritingModeData();
		
		// Each of the two streams gets half the budget, rounded down to a
		// whole number of grow increments.
		int perStreamBudget = maxBufferSize / 2;
		int increment = Math.min(8 * 1024, perStreamBudget);
		int bufferSize = perStreamBudget - perStreamBudget % increment;
		
		File indexFile = File.createTempFile("index", null, tmpDir);
		File dataFile = File.createTempFile("data", null, tmpDir);
		
		writingData.indexOutput = new DetachableOutputStream(indexFile, bufferSize, increment);
		writingData.indexOutput.addFileOpenListener(fileCreationListener);
		writingData.dataOutput = new DetachableOutputStream(dataFile, bufferSize, increment);
		writingData.dataOutput.addFileOpenListener(fileCreationListener);
	}
	
	/**
	 * Loads the compact representation written by compactify(): an index
	 * section followed by a data section, each preceded by its int length.
	 *
	 * Index layout: a sequence of groups. Each group is a run of packed
	 * cluster ids — the last id of the run stored negated as a terminator —
	 * followed by the packed byte length of the data record that all those
	 * clusters share (deduplicated trees).
	 *
	 * @param in input positioned at the start of the section
	 * @throws IOException on read failure
	 */
	public void read(DataInput in) throws IOException {
		
		// read the index
		int indexSize = in.readInt();
		byte indexData[] = new byte[indexSize];
		in.readFully(indexData);
		
		GrowingIntArray clusters = new GrowingIntArray(10);
		GrowingLongArray pointers = new GrowingLongArray(10);
		int position = 0;
		int currentDataOffset = 0; // running offset into the data section
		
		while(position < indexSize) {
			// Collect the cluster ids of the current group.
			GrowingIntArray tmpClusters = new GrowingIntArray(10);
			while(true) {
				int cluster = PackedInteger.readInt(indexData, position);
				
				int intLen = PackedInteger.getReadIntLength(indexData, position);
				position += intLen;
				
				if (cluster < 0) {
					// Negative id marks the last cluster of the group.
					// NOTE(review): cluster id 0 cannot be negated and would
					// never terminate a group — presumably ids start at 1;
					// confirm against the writer.
					cluster = -cluster;
					tmpClusters.add(cluster);
					break;
				}
				tmpClusters.add(cluster);
			}
			
			// Byte length of the shared data record for this group.
			int dataLen = PackedInteger.readInt(indexData, position);
			int intLen = PackedInteger.getReadIntLength(indexData, position);
			position += intLen;
			
			//Ptr ptr = new Ptr(currentDataOffset, dataLen);
			
			
			// Every cluster of the group points at the same (offset, length).
			for(int i=0; i<tmpClusters.size(); ++i) {
				clusters.add(tmpClusters.array()[i]);
				pointers.add(Ptr.toLong(currentDataOffset, dataLen));
			}
			currentDataOffset += dataLen;
		}
		clusters.trim();
		pointers.trim();
		clusterPointers = new CompactReadOnlyInt2LongHashMap(clusters.array(), pointers.array(), Ptr.INVALID);
		
		// read the data section verbatim; records are decoded lazily in getProbTree()
		int dataSize = in.readInt();
		data = new byte[dataSize];
		in.readFully(data);
	}

	/**
	 * Reconstructs the probability tree stored for the given cluster id.
	 * The record is a run of packed (factor index, count) pairs; counts are
	 * loaded as raw values and normalized by the tree itself.
	 *
	 * @param clusterid cluster to look up
	 * @return the decoded tree, or null if this file has no data for the cluster
	 */
	public OnDiskCompactProbTree getProbTree(int clusterid) {
		Ptr ptr = Ptr.fromLong(clusterPointers.get(clusterid));
		if (ptr == null) return null;
		
		int position = ptr.offset;
		
		// Every record takes at least two bytes (one packed int + one packed
		// long), so this bounds the number of entries.
		int maxSize = ptr.length / 2;
		int factors[] = new int[maxSize];
		float probs[] = new float[maxSize];
		
		int numFactors = 0;
		long totalCount = 0;
		
		while(position < ptr.offset + ptr.length) {
			int factorIdx = PackedInteger.readInt(data, position);
			position += PackedInteger.getReadIntLength(data, position);

			long count = PackedInteger.readLong(data, position);
			position += PackedInteger.getReadLongLength(data, position);
			
			factors[numFactors] = descriptor.getCompactFactors(factorIdx);
			probs[numFactors] = count; // raw count; tree.normalize() rescales
			
			totalCount += count;
			++numFactors;
		}
		// (A no-op "System.out.print("")" debugger anchor for the
		// numFactors == 0 || totalCount == 0 case was removed here.)
		
		// Trim the over-allocated arrays to the actual entry count.
		if (numFactors < maxSize) {
			factors = Arrays.copyOf(factors, numFactors);
			probs = Arrays.copyOf(probs, numFactors);
		}
		long clusterCount = descriptor.getClusterCount(clusterid);
		OnDiskCompactProbTree tree = new OnDiskCompactProbTree(factors, probs, 1.0 / clusterCount);
		tree.normalize();
		
		return tree;
	}
	
	/**
	 * Serializes the probability tree for the given cluster and appends it to
	 * the temporary data file; a (cluster id, record length) entry goes to the
	 * temporary index file. Probabilities are stored as integer counts scaled
	 * by the cluster count, so entries whose count rounds to zero are dropped.
	 *
	 * Data and index appends happen under one lock so the two files stay in
	 * the same order, which compactify() relies on.
	 *
	 * @param clusterid cluster whose tree is being stored
	 * @param probTree tree to serialize
	 */
	public void setProbTree(int clusterid, OnDiskCompactProbTree probTree) {
		long totalClusterCount = descriptor.getClusterCount(clusterid);
		
		int tags[] = probTree.getCompactHiddenFactors();
		float probs[] = probTree.getProbabilities();
		double wordProb = probTree.getScale();
		
		double expectedTotalCount = wordProb * totalClusterCount;
		long totalCount = 0;
		
		// Worst case: each entry is a max-length packed int plus a max-length
		// packed long. NOTE(review): assumes PackedInteger.MAX_LENGTH is the
		// packed-long maximum — confirm, otherwise this could undersize.
		int maxSize = PackedInteger.MAX_LENGTH * tags.length * 2;
		byte buf[] = new byte[maxSize];
		int position = 0;
		
		for(int i=0; i<probTree.getSize(); ++i) {
			// Math.round(double) already returns long; the old (long) cast was redundant.
			long count = Math.round(totalClusterCount * wordProb * probs[i]);
			if (count == 0) continue; // entry rounds away entirely
			totalCount += count;

			int factorIdx = descriptor.getCompactFactorsIndex(tags[i]);
			int intLen = PackedInteger.getWriteIntLength(factorIdx);
			PackedInteger.writeInt(buf, position, factorIdx);
			position += intLen;
			
			intLen = PackedInteger.getWriteLongLength(count);
			PackedInteger.writeLong(buf, position, count);
			position += intLen;
		}
		
		// Sanity: totalCount should approximate expectedTotalCount up to
		// rounding. (A no-op ProbMath.approxEqual(...) debugger anchor that
		// printed an empty string used to sit here; removed.)
		int dataLen = position;
		
		try {
			synchronized(writingData) {
				writingData.dataOutput.write(buf, 0, position);
				
				// Reuse buf for the index record: it needs room for two packed
				// ints, which an empty tree's buffer may not have.
				if (buf.length < PackedInteger.MAX_LENGTH * 2) {
					buf = new byte[PackedInteger.MAX_LENGTH * 2];
				}
				position = 0;
	
				int intLen = PackedInteger.getWriteIntLength(clusterid);
				PackedInteger.writeInt(buf, position, clusterid);
				position += intLen;
				
				intLen = PackedInteger.getWriteIntLength(dataLen);
				PackedInteger.writeInt(buf, position, dataLen);
				position += intLen;
				
				writingData.indexOutput.write(buf, 0, position);
			}
		} catch(IOException e) {
			// NOTE(review): the failure is swallowed and the output silently
			// left incomplete; the signature declares no checked exception, so
			// consider wrapping in an unchecked exception instead.
			e.printStackTrace();
		}
	}
	
	/**
	 * Flushes and closes both temporary output streams. A no-op for
	 * read-only instances. Close failures are reported but not propagated,
	 * matching the rest of this class's write-path error handling.
	 */
	public void finishWriting() {
		if (writingData == null) {
			return; // read-only instance: nothing was ever opened
		}
		synchronized (writingData) {
			try {
				writingData.dataOutput.close();
			} catch (IOException closeFailure) {
				closeFailure.printStackTrace();
			}
			try {
				writingData.indexOutput.close();
			} catch (IOException closeFailure) {
				closeFailure.printStackTrace();
			}
		}
	}
	
	/**
	 * Hash-map key comparing a byte range (the serialized form of one
	 * probability tree) by content. Used by compactify() to deduplicate
	 * identical trees. The backing array is not copied and must not change
	 * while the key is in use.
	 */
	private static class ProbTreeCompactionKey {
		final byte buffer[];
		final int size;
		final int offset;
		// Cached content hash: computing it is O(size), so do it exactly once.
		final int hash;

		/**
		 * @param buffer backing array (shared, not copied)
		 * @param size   number of bytes in the range
		 * @param offset start of the range within buffer
		 */
		public ProbTreeCompactionKey(byte[] buffer, int size, int offset) {
			this.buffer = buffer;
			this.size = size;
			this.offset = offset;
			// BUG FIX: the old code seeded this field by calling the
			// overridable hashCode() from the constructor, and hashCode()
			// itself recomputed the hash on every call, defeating the cache
			// HashMap relies on. Same algorithm, computed once.
			this.hash = computeHash(buffer, offset, size);
		}

		/** Content hash over buffer[offset, offset+size); identical values to the original algorithm. */
		private static int computeHash(byte[] buffer, int offset, int size) {
			int result = 13;
			for (int i = offset; i < offset + size; ++i) {
				result = result * 5 + buffer[i];
			}
			return result;
		}

		@Override
		public int hashCode() {
			return hash;
		}

		@Override
		public boolean equals(Object other) {
			// instanceof already rejects null; the old explicit check was redundant.
			if (other instanceof ProbTreeCompactionKey) {
				ProbTreeCompactionKey o = (ProbTreeCompactionKey) other;

				// Cheap rejects first: differing length or cached hash.
				if (o.size != size || o.hash != hash) return false;

				for (int i = 0; i < size; ++i) {
					if (buffer[i + offset] != o.buffer[i + o.offset]) return false;
				}
				return true;
			}
			return false;
		}
	}
	
	/**
	 * Merges the two temporary files written by setProbTree() into the final
	 * compact representation and writes it to dest, optionally gzipped.
	 * Identical serialized trees are stored once: all cluster ids sharing a
	 * record are listed in front of it, the last id negated as a terminator —
	 * exactly the format read() parses. Deletes the temp files when done.
	 *
	 * NOTE(review): HashMap iteration order makes the byte layout (not the
	 * content) nondeterministic across runs — confirm nothing depends on
	 * byte-identical output.
	 *
	 * @param dest destination for the compacted section
	 * @param compress gzip-compress the payload if true
	 * @throws IOException on read/write failure
	 */
	public void compactify(FileOutputStream dest, boolean compress) throws IOException {
		File indexFile = writingData.indexOutput.getBackendFile();
		File dataFile = writingData.dataOutput.getBackendFile();
		
		// Slurp both temp files whole; lengths are assumed to fit in an int.
		// NOTE(review): these files are not closed in a finally block and
		// would leak if readFully throws.
		RandomAccessFile index = new RandomAccessFile(indexFile, "r");
		int indexLen = (int) index.length();
		byte indexReadBuf[] = new byte[indexLen];
		index.readFully(indexReadBuf);
		index.close();
		
		RandomAccessFile data = new RandomAccessFile(dataFile, "r");
		int dataLen = (int) data.length();
		byte dataReadBuf[] = new byte[dataLen];
		data.readFully(dataReadBuf);
		data.close();
		
		// Output can only shrink (deduplication), so input sizes are safe bounds.
		byte indexWriteBuf[] = new byte[indexLen];
		byte dataWriteBuf[] = new byte[dataLen];
		
		int indexReadPos = 0;
		int dataReadPos = 0;
		int indexWritePos = 0;
		int dataWritePos = 0;
		
		// Group cluster ids by the byte content of their serialized tree.
		HashMap<ProbTreeCompactionKey, GrowingIntArray> probTreeRefs = new HashMap<ProbTreeCompactionKey, GrowingIntArray>(indexLen / 5 + 1);
		
		while(indexReadPos < indexLen) {
			// Index entries are (packed cluster id, packed data length),
			// in the same order as the records in the data file.
			int intLen = PackedInteger.getReadIntLength(indexReadBuf, indexReadPos);
			int clusterid = PackedInteger.readInt(indexReadBuf, indexReadPos);
			indexReadPos += intLen;
			
			intLen = PackedInteger.getReadIntLength(indexReadBuf, indexReadPos);
			int clusterDataLen = PackedInteger.readInt(indexReadBuf, indexReadPos);
			indexReadPos += intLen;
			
			ProbTreeCompactionKey theKey = new ProbTreeCompactionKey(dataReadBuf, clusterDataLen, dataReadPos);
			GrowingIntArray clusterList = probTreeRefs.get(theKey);
			if (clusterList == null) {
				clusterList = new GrowingIntArray(4);
				probTreeRefs.put(theKey, clusterList);
			}
			clusterList.add(clusterid);
			
			dataReadPos += clusterDataLen;
		}		
		
		int duplicateProbTrees = 0;
		int totalProbTrees = 0;
		int spaceSaved = 0;
		
		// Emit each unique record once, preceded by all cluster ids that use it.
		for(Map.Entry<ProbTreeCompactionKey, GrowingIntArray> e : probTreeRefs.entrySet()) {
			ProbTreeCompactionKey theKey = e.getKey();
			GrowingIntArray clusterList = e.getValue();
			
			clusterList.trim();
			int clusters[] = clusterList.array();
			
			// All but the last id are written as-is.
			for(int i=0; i<clusters.length-1; ++i) {
				int clusterid = clusters[i];
				
				int intLen = PackedInteger.getWriteIntLength(clusterid);
				PackedInteger.writeInt(indexWriteBuf, indexWritePos, clusterid);
				indexWritePos += intLen;
			}
			
			// The last id is negated to terminate the group (see read()).
			// NOTE(review): id 0 and Integer.MIN_VALUE cannot be negated —
			// presumably ids are small positive; confirm.
			int clusterid = -clusters[clusters.length-1];
			int intLen = PackedInteger.getWriteIntLength(clusterid);
			PackedInteger.writeInt(indexWriteBuf, indexWritePos, clusterid);
			indexWritePos += intLen;
			
			intLen = PackedInteger.getWriteIntLength(theKey.size);
			PackedInteger.writeInt(indexWriteBuf, indexWritePos, theKey.size);
			indexWritePos += intLen;
			
			// copy data
			System.arraycopy(dataReadBuf, theKey.offset, dataWriteBuf, dataWritePos, theKey.size);
			dataWritePos += theKey.size;

			totalProbTrees += clusters.length;
			if (clusters.length > 1) {
				duplicateProbTrees += clusters.length - 1;
				spaceSaved += (clusters.length - 1) * (theKey.size + PackedInteger.getWriteIntLength(theKey.size));
			}
		}
		// Fold this file's stats into the class-wide atomic counters.
		WordFile.totalProbTrees.addAndGet(totalProbTrees);
		WordFile.duplicateProbTrees.addAndGet(duplicateProbTrees);
		WordFile.totalSpaceSaved.addAndGet(spaceSaved);
		
		// (A commented-out pre-deduplication copy loop was removed here; see VCS history.)
		int maxLength = 8 + indexWritePos + dataWritePos; // buffers plus two ints (sizes)
		
		// Buffer the whole section so a gzip header/trailer never interleaves
		// with other writers of dest.
		ByteArrayOutputStream bufOutput = new ByteArrayOutputStream(maxLength);
		DataOutputStream output;
		if (compress) {
			output = new DataOutputStream(new GZIPOutputStream(bufOutput, 4096));
		} else {
			output = new DataOutputStream(bufOutput);
		}
		
		output.writeInt(indexWritePos);
		output.write(indexWriteBuf, 0, indexWritePos);
		
		output.writeInt(dataWritePos);
		output.write(dataWriteBuf, 0, dataWritePos);
		output.close();
		
		bufOutput.writeTo(dest);
		
		// The temp files are no longer needed.
		indexFile.delete();
		dataFile.delete();
	}

	/** @return total number of probability trees seen across all compactify() calls. */
	public static long getTotalprobtrees() {
		return totalProbTrees.get();
	}

	/** @return number of trees eliminated by deduplication across all compactify() calls. */
	public static long getDuplicateprobtrees() {
		return duplicateProbTrees.get();
	}

	/** @return total bytes saved by deduplication across all compactify() calls. */
	public static long getTotalspacesaved() {
		return totalSpaceSaved.get();
	}
}
