/**
 * 
 */
package edu.umd.clip.lm.playground;

import java.io.*;
import java.util.*;

import com.sleepycat.je.*;

import edu.berkeley.nlp.util.Option;
import edu.berkeley.nlp.util.OptionParser;
import edu.umd.clip.jobs.*;
import edu.umd.clip.lm.factors.FactorTupleDescription;
import edu.umd.clip.lm.model.Experiment;
import edu.umd.clip.lm.model.OnDiskCompactProbTree;
import edu.umd.clip.lm.storage.*;
import edu.umd.clip.lm.storage.AbstractProbTreeStorage.Key;
import edu.umd.clip.lm.storage.BDBProbTreeStorage.KeyBinding;
import edu.umd.clip.lm.storage.BDBProbTreeStorage.ProbTreeBinding;
import edu.umd.clip.lm.util.IO;

/**
 * @author Denis Filimonov <den@cs.umd.edu>
 *
 */
public class DbActualSize {
	/** Command-line options parsed by {@link OptionParser}. */
	public static class Options {
		@Option(name = "-config", required = true, usage = "XML config file")
		public String config;

		@Option(name = "-db", required = true, usage = "Berkeley DB environment")
		public String db;

		@Option(name = "-dump", required = false, usage = "dump the databases")
		public boolean dump;
	}

	/**
	 * Scans every record of the named database, accumulating the entry count and
	 * the total key/data byte sizes, and prints the totals to stdout. When
	 * {@code dumpFilename} is non-null, also writes one line per record to that
	 * file in the form {@code word,clusterId:scale*tag,prob;tag,prob;...}.
	 *
	 * <p>Database errors are reported via {@code printStackTrace} and swallowed
	 * (best-effort: a failure on one database must not abort the other jobs).
	 *
	 * @param env          open BDB environment containing the database
	 * @param dbName       name of the database to scan
	 * @param dumpFilename output file for the record dump, or null to skip dumping
	 * @throws IOException if writing the dump file fails
	 */
	private static void countDb(Environment env, String dbName, final String dumpFilename) throws IOException {
		KeyBinding kb = new BDBProbTreeStorage.KeyBinding(1);
		ProbTreeBinding pb = new BDBProbTreeStorage.ProbTreeBinding();
		FactorTupleDescription desc = Experiment.getInstance().getTupleDescription();

		final BufferedWriter writer = (dumpFilename != null)
				? IO.getWriter(IO.getOutputStream(dumpFilename))
				: null;

		try {
			DatabaseConfig dbConf = new DatabaseConfig();
			dbConf.setReadOnly(true);
			dbConf.setTransactional(false);
			Database db = env.openDatabase(null, dbName, dbConf);
			try {
				long elementCount = 0;
				long keySize = 0;
				long dataSize = 0;

				// Dirty reads are fine: the environment is opened read-only.
				Cursor cursor = db.openCursor(null, CursorConfig.READ_UNCOMMITTED);
				try {
					DatabaseEntry key = new DatabaseEntry();
					DatabaseEntry data = new DatabaseEntry();
					OperationStatus status = cursor.getFirst(key, data, null);

					while (status == OperationStatus.SUCCESS) {
						++elementCount;
						keySize += key.getSize();
						dataSize += data.getSize();

						if (writer != null) {
							Key theKey = kb.entryToObject(key);
							OnDiskCompactProbTree probTree = pb.entryToObject(data);
							int word = desc.packOvertFactorsToInt(theKey.getFactorBits());

							StringBuilder sb = new StringBuilder();
							sb.append(word);
							sb.append(',');
							sb.append(theKey.getClusterid());
							sb.append(':');
							sb.append(probTree.getScale());
							sb.append('*');
							int[] tags = probTree.getCompactHiddenFactors();
							float[] probabilities = probTree.getProbabilities();
							for (int i = 0; i < tags.length; ++i) {
								sb.append(tags[i]);
								sb.append(',');
								sb.append(probabilities[i]);
								sb.append(';');
							}
							sb.append('\n');
							writer.write(sb.toString());
						}
						status = cursor.getNext(key, data, null);
					}
				} finally {
					// Release the cursor even if iteration or decoding throws.
					cursor.close();
				}

				System.out.printf("%s has %d keys, key size = %d, data size = %d\n",
						dbName, elementCount, keySize, dataSize);
			} finally {
				db.close();
			}
		} catch (DatabaseException e) {
			// Best-effort: report the failure but keep scanning other databases.
			e.printStackTrace();
		} finally {
			// Close the dump writer even when writer.write() throws IOException;
			// previously the writer leaked on that path.
			if (writer != null) {
				writer.close();
			}
		}
	}

	/**
	 * Entry point: initializes the experiment from the XML config, starts a
	 * 6-worker job manager, and submits one {@link #countDb} job per database
	 * found in the BDB environment. Blocks until all jobs finish, then closes
	 * the environment.
	 *
	 * @param args command-line arguments, see {@link Options}
	 */
	public static void main(String[] args) {
		OptionParser optParser = new OptionParser(Options.class);
		final Options opts = (Options) optParser.parse(args, true);

		Experiment.initialize(opts.config);
		Experiment experiment = Experiment.getInstance();
		// Force tuple-description loading up front, before worker threads start.
		experiment.getTupleDescription();

		JobManager.initialize(6);
		JobManager manager = JobManager.getInstance();
		Thread thread = new Thread(manager, "Job Manager");
		thread.setDaemon(true);
		thread.start();

		EnvironmentConfig envConf = new EnvironmentConfig();
		envConf.setAllowCreate(false);
		//envConf.setSharedCache(true);
		envConf.setTransactional(false);

		try {
			final Environment env = new Environment(new File(opts.db), envConf);
			List<String> dbNames = env.getDatabaseNames();
			JobGroup group = manager.createJobGroup("group");

			for (final String dbName : dbNames) {
				Runnable run = new Runnable() {
					@Override
					public void run() {
						try {
							countDb(env, dbName, opts.dump ? dbName + "-dump.txt" : null);
						} catch (IOException e) {
							// Report but do not kill the worker thread.
							e.printStackTrace();
						}
					}
				};

				Job job = new Job(run, dbName);
				manager.addJob(group, job);
			}
			group.join();

			env.close();
		} catch (EnvironmentLockedException e) {
			e.printStackTrace();
		} catch (DatabaseException e) {
			e.printStackTrace();
		}
	}

}
