package blocksim;

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;

import blocksim.chunking.ChunkingStrategy;
import blocksim.chunking.ContentBasedChunking;
import blocksim.chunking.FixedSizeChunking;
import blocksim.util.BlockInfo;
import blocksim.util.BlockInfoBinding;

import com.sleepycat.bind.tuple.TupleBinding;
import com.sleepycat.je.Cursor;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;
import com.sleepycat.je.LockMode;
import com.sleepycat.je.OperationStatus;
import com.sleepycat.je.Transaction;

public class DBSimilarityDetection {

	/** Buffer size (bytes) used when streaming file data into the digest. */
	private static final int READ_BUFFER_SIZE = 16384;

	/** Digest used to fingerprint each block ("SHA" resolves to SHA-1 in the JDK). */
	private MessageDigest simHashFunction;
	private EnvironmentConfig envConfig;
	private Environment env;
	private DatabaseConfig dbConfig;
	private Database db;
	/** Never assigned: the store is opened non-transactional, so every operation uses a null txn. */
	private Transaction txn;

	/**
	 * Opens (creating if necessary) a non-transactional Berkeley DB JE
	 * environment and database used as the persistent block-fingerprint store.
	 *
	 * @param envName
	 *          Directory holding the JE environment; must already exist
	 * @param dbName
	 *          Name of the database inside the environment
	 * @throws NoSuchAlgorithmException
	 *           If the "SHA" digest algorithm is unavailable
	 */
	public DBSimilarityDetection(String envName, String dbName) throws NoSuchAlgorithmException {
		this.simHashFunction = MessageDigest.getInstance("SHA");
		this.envConfig = new EnvironmentConfig();
		this.envConfig.setAllowCreate(true);
		this.envConfig.setTransactional(false);
		this.env = new Environment(new File(envName), envConfig);
		this.dbConfig = new DatabaseConfig();
		this.dbConfig.setAllowCreate(true);
		this.dbConfig.setTransactional(false);
		this.dbConfig.setSortedDuplicates(false);
		this.db = env.openDatabase(null, dbName, dbConfig);
	}

	/**
	 * Detects the similarity of a new file divided in blocks specified by a list
	 * of boundaries' positions. Each block is hashed and looked up in the store:
	 * a hit increments that block's frequency, a miss inserts a fresh record.
	 *
	 * @param fileName
	 *          New file to detect similarity
	 * @param boundaries
	 *          List of block boundary positions (exclusive end offsets, ascending)
	 * @return Number of blocks already present in the store
	 * @throws IOException
	 *           If the file cannot be read or ends before the last boundary
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public int detectSimilarity(String fileName, List<Long> boundaries) throws IOException {
		int anySimilarBlock = 0;
		long blockBegin = 0;
		byte[] buffer = new byte[READ_BUFFER_SIZE];
		TupleBinding blockInfoBinding = new BlockInfoBinding();

		// try-with-resources: the original leaked the stream when a read threw.
		try (InputStream in = new BufferedInputStream(new FileInputStream(fileName))) {
			for (long blockEnd : boundaries) {
				long blockSize = blockEnd - blockBegin;
				processNextBlock(buffer, in, blockSize);
				byte[] hash = simHashFunction.digest(); // digest() also resets the digest for the next block
				DatabaseEntry key = new DatabaseEntry(hash);
				DatabaseEntry data = new DatabaseEntry();

				if (db.get(txn, key, data, LockMode.READ_UNCOMMITTED) != OperationStatus.NOTFOUND) {
					// Known block: bump its frequency counter and write it back.
					anySimilarBlock++;
					BlockInfo info = (BlockInfo) blockInfoBinding.entryToObject(data);
					info.updateBlockFrequency();
					blockInfoBinding.objectToEntry(info, data);
					db.put(txn, key, data);
				} else {
					// New block: store a fresh record. The original reused a single
					// BlockInfo across iterations, so a new block could silently
					// inherit the frequency deserialized from the last known block.
					BlockInfo info = new BlockInfo();
					info.setBlockSize(blockSize);
					blockInfoBinding.objectToEntry(info, data);
					db.put(txn, key, data);
				}
				blockBegin = blockEnd;
			}
		}
		return anySimilarBlock;
	}

	/**
	 * Read blockSize bytes from the input stream and feed them into the hash
	 * function in buffer-sized slices.
	 *
	 * @param buffer
	 *          Scratch array to read into
	 * @param in
	 *          Input data stream to read the data from
	 * @param blockSize
	 *          Size of the whole block to read from the data stream
	 * @throws IOException
	 *           If the read operation fails or the stream ends early
	 * @author marcus
	 */
	private void processNextBlock(byte[] buffer, InputStream in, long blockSize) throws IOException {
		long accBytesRead = 0;
		// while (not do/while) so a zero-length block never touches the stream.
		while (accBytesRead < blockSize) {
			int len = (int) Math.min(buffer.length, blockSize - accBytesRead);
			int bytesRead = in.read(buffer, 0, len);
			if (bytesRead < 0) {
				// The original passed -1 straight to MessageDigest.update(),
				// producing an obscure IndexOutOfBoundsException (and a potential
				// infinite loop). Fail with a meaningful error instead.
				throw new IOException("Unexpected EOF: expected " + blockSize
						+ " bytes in block, got " + accBytesRead);
			}
			simHashFunction.update(buffer, 0, bytesRead);
			accBytesRead += bytesRead;
		}
	}

	/**
	 * Close the database and then the environment (JE requires this order).
	 */
	private void closeDB() {
		db.close();
		env.close();
	}

	/**
	 * Print the total backup size after deduplication (sum of each stored
	 * block's size, counted once regardless of frequency).
	 */
	@SuppressWarnings("rawtypes")
	private void printBackupSize() {
		DatabaseEntry key = new DatabaseEntry();
		DatabaseEntry data = new DatabaseEntry();
		TupleBinding blockInfoBinding = new BlockInfoBinding();
		long partialInfo = 0;

		Cursor cursor = db.openCursor(txn, null);
		try {
			while (cursor.getNext(key, data, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
				BlockInfo info = (BlockInfo) blockInfoBinding.entryToObject(data);
				partialInfo = partialInfo + info.getBlockSize();
			}
		} finally {
			cursor.close();
		}

		System.out.println("Total Backup Size after Deduplication: " + partialInfo);
	}

	/* Main code
	 * Usage: SimilarityDetection <file with list of files> <boundaryDetectionStrategy(SHA,MD5,Rabin,Fixed)>
	 * <m> <k> <p> <minBlockSize> <maxBlockSize>
	 */
	public static void main(String[] args) throws Exception {

		if (args.length < 1) {
			System.err.println("Usage: SimilarityDetection "
					+ "<File with a list of files to be submitted>"
					+ "<boundaryDetectionStrategy(SHA,MD5,Rabin,Fixed)> <m> <k> <p>"
					+ "<minBlockSize> <maxBlockSize>");
			System.exit(1);
		}

		String backupList = args[0];

		String boundHashFunction = args.length > 1 ? args[1]
				: ContentBasedChunking.BOUNDARY_HASHF_DEFAULT;

		ChunkingStrategy chunkingSt = null;

		// For the fixed-size blocks strategy, m is the block size
		int m = args.length > 2 ? Integer.parseInt(args[2]) : ContentBasedChunking.M_DEFAULT;
		int k = 0;
		int p = 0;
		long minBlockSize = 0;
		long maxBlockSize = 0;

		if (boundHashFunction.equalsIgnoreCase("fixed")) {
			chunkingSt = new FixedSizeChunking(m);
			minBlockSize = m;
			maxBlockSize = m;
		} else {
			k = args.length > 3 ? Integer.parseInt(args[3]) : ContentBasedChunking.K_DEFAULT;
			p = args.length > 4 ? Integer.parseInt(args[4]) : ContentBasedChunking.P_DEFAULT;
			minBlockSize = args.length > 5 ? Long.parseLong(args[5])
					: ContentBasedChunking.MIN_CHUNKSIZE_DEFAULT;
			maxBlockSize = args.length > 6 ? Long.parseLong(args[6])
					: ContentBasedChunking.MAX_CHUNKSIZE_DEFAULT;
			chunkingSt = new ContentBasedChunking(m, k, p, minBlockSize, maxBlockSize, boundHashFunction);
		}

		// initial time
		long initTime = System.currentTimeMillis();

		String dbName = "db-" + backupList + "_" + boundHashFunction + "_" + m;
		String envName = "env-" + backupList + "_" + boundHashFunction + "_" + m;

		// Create the environment directory portably. The original used
		// Runtime.exec("mkdir " + envName), which is asynchronous (races with
		// the Environment constructor that needs the directory to exist),
		// non-portable, and shell-argument-injectable via the list file name.
		File envDir = new File(envName);
		if (!envDir.exists() && !envDir.mkdirs()) {
			throw new IOException("Could not create environment directory: " + envName);
		}

		DBSimilarityDetection dbDec = new DBSimilarityDetection(envName, dbName);
		String filename = null;

		// Read each file path from the backup list and deduplicate it.
		try (BufferedReader stdInput = new BufferedReader(new FileReader(backupList))) {
			while ((filename = stdInput.readLine()) != null) {
				boolean exists = (new File(filename)).exists();
				if (exists) {
					List<Long> boundaries = chunkingSt.detectBoundaries(filename);
					int simBlocks = dbDec.detectSimilarity(filename, boundaries);
					System.out.printf("%s\t%d\n", filename, simBlocks);
				}
			}
		}

		// end time
		long endTime = System.currentTimeMillis();
		long totalTime = endTime - initTime;

		// total backup size after deduplication
		dbDec.printBackupSize();

		// report total time
		System.out.println("Total deduplication time: " + totalTime);

		// close db and environment
		dbDec.closeDB();

	}

}
